Compare commits

..

No commits in common. "alpha" and "v0.0.22-alpha" have entirely different histories.

146 changed files with 5887 additions and 7298 deletions

View file

@ -2,11 +2,6 @@ name: Acceptance Testing
description: "Acceptance Testing pipeline" description: "Acceptance Testing pipeline"
inputs: inputs:
test_timeout_string:
description: "string that determines the timeout (default: 45m)"
default: '45m'
required: true
go-version: go-version:
description: "go version to install" description: "go version to install"
default: '1.25' default: '1.25'
@ -16,78 +11,38 @@ inputs:
description: "STACKIT project ID for tests" description: "STACKIT project ID for tests"
required: true required: true
project_user_email:
required: true
description: "project user email for acc testing"
tf_acc_kek_key_id:
description: "KEK key ID"
required: true
tf_acc_kek_key_ring_id:
description: "KEK key ring ID"
required: true
tf_acc_kek_key_version:
description: "KEK key version"
required: true
tf_acc_kek_service_account:
description: "KEK service account email"
required: true
region: region:
description: "STACKIT region for tests" description: "STACKIT region for tests"
default: 'eu01' default: 'eu01'
required: true required: true
service_account_json_content: service_account_json:
description: "STACKIT service account JSON file contents" description: "STACKIT service account JSON file contents"
required: true required: true
default: ""
service_account_json_content_b64:
description: "STACKIT service account JSON file contents"
required: true
default: ""
service_account_json_file_path:
description: "STACKIT service account JSON file contents"
required: true
default: 'service_account.json'
test_file: test_file:
description: "testfile to run" description: "testfile to run"
default: '' default: ''
outputs:
#outputs: random-number:
# random-number: description: "Random number"
# description: "Random number" value: ${{ steps.random-number-generator.outputs.random-number }}
# value: ${{ steps.random-number-generator.outputs.random-number }}
runs: runs:
using: "composite" using: "composite"
steps: steps:
# - name: Random Number Generator - name: Random Number Generator
# id: random-number-generator id: random-number-generator
# run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
# shell: bash shell: bash
- name: Install needed tools - name: Install needed tools
shell: bash shell: bash
run: | run: |
echo "::group::apt install"
set -e set -e
apt-get -y -qq update >apt_update.log 2>apt_update_err.log apt-get -y -qq update
if [ $? -ne 0 ]; then apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
cat apt_update.log apt_update_err.log
fi
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget >apt_get.log 2>apt_get_err.log
if [ $? -ne 0 ]; then
cat apt_get.log apt_get_err.log
fi
echo "::endgroup::"
- name: Setup JAVA - name: Setup JAVA
uses: actions/setup-java@v5 uses: actions/setup-java@v5
@ -98,165 +53,62 @@ runs:
- name: Install Go ${{ inputs.go-version }} - name: Install Go ${{ inputs.go-version }}
uses: actions/setup-go@v6 uses: actions/setup-go@v6
with: with:
# go-version: ${{ inputs.go-version }} go-version: ${{ inputs.go-version }}
check-latest: true check-latest: true
go-version-file: 'go.mod' go-version-file: 'go.mod'
- name: Determine GOMODCACHE
shell: bash
id: goenv
run: |
set -e
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
- name: Restore cached GO pkg
id: cache-gopkg
uses: actions/cache/restore@v5
with:
path: "${{ steps.goenv.outputs.gomodcache }}"
key: ${{ runner.os }}-gopkg
- name: Install go tools - name: Install go tools
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash shell: bash
run: | run: |
echo "::group::go install"
set -e set -e
go mod download go mod download
go install golang.org/x/tools/cmd/goimports@latest go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
echo "::endgroup::"
- name: Run go mod tidy
shell: bash
run: go mod tidy
- name: Save GO package Cache - name: Prepare pkg_gen directory
id: cache-gopkg-save
uses: actions/cache/save@v5
with:
path: |
${{ steps.goenv.outputs.gomodcache }}
key: ${{ runner.os }}-gopkg
- name: Creating service_account file from json input
if: inputs.service_account_json_content != ''
shell: bash shell: bash
run: | run: |
echo "::group::create service account file" go run cmd/main.go build -p
set -e
set -o pipefail
jsonFile="${{ inputs.service_account_json_file_path }}"
jsonFile="${jsonFile:-x}"
if [ "${jsonFile}" == "x" ]; then
echo "no service account file path provided"
exit 1
fi
if [ ! -f "${jsonFile}" ]; then
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
echo "${{ inputs.service_account_json_content }}" > stackit/"${{ inputs.service_account_json_file_path }}"
fi
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
echo "::endgroup::"
- name: Creating service_account file from base64 json input
if: inputs.service_account_json_content_b64 != ''
shell: bash
run: |
echo "::group::create service account file"
set -e
set -o pipefail
jsonFile="${{ inputs.service_account_json_file_path }}"
jsonFile="${jsonFile:-x}"
if [ "${jsonFile}" == "x" ]; then
echo "no service account file path provided"
exit 1
fi
if [ ! -f "${jsonFile}" ]; then
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
echo "${{ inputs.service_account_json_content_b64 }}" | base64 -d > stackit/"${{ inputs.service_account_json_file_path }}"
fi
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
echo "::endgroup::"
- name: Run acceptance test file - name: Run acceptance test file
if: ${{ inputs.test_file != '' }} if: ${{ inputs.test_file != '' }}
shell: bash shell: bash
run: | run: |
echo "::group::go test file"
set -e
set -o pipefail
echo "Running acceptance tests for the terraform provider" echo "Running acceptance tests for the terraform provider"
cd stackit || exit 1 echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
cd stackit
TF_ACC=1 \ TF_ACC=1 \
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \ TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
TF_ACC_REGION=${TF_ACC_REGION} \ TF_ACC_REGION=${TF_ACC_REGION} \
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \ go test ${{ inputs.test_file }} -count=1 -timeout=30m
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
go test ${{ inputs.test_file }} -count=1 -timeout=${{ inputs.test_timeout_string }}
echo "::endgroup::"
env: env:
TF_ACC_PROJECT_ID: ${{ inputs.project_id }} STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
TF_PROJECT_ID: ${{ inputs.project_id }}
TF_ACC_REGION: ${{ inputs.region }} TF_ACC_REGION: ${{ inputs.region }}
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }} # TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }} # TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }} # TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
# - name: Run test action
# if: ${{ inputs.test_file == '' }}
# env:
# TF_ACC: 1
# TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
# TF_ACC_REGION: ${{ inputs.region }}
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
# TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
# TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
# TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
# TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
# TF_ACC_SERVICE_ACCOUNT_FILE: "${PWD}/${{ inputs.service_account_json_file_path }}"
# uses: robherley/go-test-action@v0.1.0
# with:
# testArguments: "./... -timeout 45m"
- name: Run acceptance tests - name: Run acceptance tests
if: ${{ inputs.test_file == '' }} if: ${{ inputs.test_file == '' }}
shell: bash shell: bash
run: | run: |
echo "::group::go test all"
set -e
set -o pipefail
echo "Running acceptance tests for the terraform provider" echo "Running acceptance tests for the terraform provider"
cd stackit || exit 1 echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
cd stackit
TF_ACC=1 \ TF_ACC=1 \
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \ TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
TF_ACC_REGION=${TF_ACC_REGION} \ TF_ACC_REGION=${TF_ACC_REGION} \
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \ go test ./... -count=1 -timeout=30m
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
go test ./... -count=1 -timeout=${{ inputs.test_timeout_string }}
echo "::endgroup::"
env: env:
TF_ACC_PROJECT_ID: ${{ inputs.project_id }} STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
TF_PROJECT_ID: ${{ inputs.project_id }}
TF_ACC_REGION: ${{ inputs.region }} TF_ACC_REGION: ${{ inputs.region }}
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }} # TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }} # TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }} # TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}

View file

@ -20,63 +20,25 @@ runs:
run: | run: |
set -e set -e
apt-get -y -qq update apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- name: Checkout
uses: actions/checkout@v6
- name: Install Go ${{ inputs.go-version }} - name: Install Go ${{ inputs.go-version }}
uses: actions/setup-go@v6 uses: actions/setup-go@v6
with: with:
# go-version: ${{ inputs.go-version }} go-version: ${{ inputs.go-version }}
check-latest: true check-latest: true
go-version-file: 'go.mod' go-version-file: 'go.mod'
- name: Determine GOMODCACHE
shell: bash
id: goenv
run: |
set -e
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
- name: Restore cached GO pkg
id: cache-gopkg
uses: actions/cache/restore@v5
with:
path: "${{ steps.goenv.outputs.gomodcache }}"
key: ${{ runner.os }}-gopkg
- name: Install go tools - name: Install go tools
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash shell: bash
run: | run: |
set -e set -e
go install golang.org/x/tools/cmd/goimports@latest go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
# - name: Run build pkg directory
# shell: bash
# run: |
# set -e
# go run generator/main.go build
- name: Get all go packages
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash
run: |
set -e
go get ./...
- name: Save Cache
id: cache-gopkg-save
uses: actions/cache/save@v5
with:
path: |
${{ steps.goenv.outputs.gomodcache }}
key: ${{ runner.os }}-gopkg
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }} - name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
uses: actions/setup-java@v5 uses: actions/setup-java@v5
@ -84,6 +46,16 @@ runs:
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
java-version: ${{ inputs.java-version }} java-version: ${{ inputs.java-version }}
- name: Checkout
uses: actions/checkout@v6
- name: Run build pkg directory
shell: bash
run: |
set -e
go run cmd/main.go build
- name: Run make to build app - name: Run make to build app
shell: bash shell: bash
run: | run: |

View file

@ -26,9 +26,9 @@ runs:
uses: https://code.forgejo.org/actions/setup-go@v6 uses: https://code.forgejo.org/actions/setup-go@v6
id: go-version id: go-version
with: with:
# go-version: ${{ inputs.go-version }} go-version: ${{ inputs.go-version }}
check-latest: true # Always check for the latest patch release check-latest: true # Always check for the latest patch release
go-version-file: "go.mod" # go-version-file: "go.mod"
# do not cache dependencies, we do this manually # do not cache dependencies, we do this manually
cache: false cache: false

View file

@ -22,39 +22,6 @@ env:
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage" CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
jobs: jobs:
runner_test:
name: "Test STACKIT runner"
runs-on: stackit-docker
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- name: Setup Go
uses: actions/setup-go@v6
with:
go-version: ${{ env.GO_VERSION }}
- name: Install go tools
run: |
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- name: Setup JAVA
uses: actions/setup-java@v5
with:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21'
- name: Checkout
uses: actions/checkout@v6
- name: Run build pkg directory
run: |
go run cmd/main.go build
publish_test: publish_test:
name: "Test readiness for publishing provider" name: "Test readiness for publishing provider"
needs: config needs: config

View file

@ -1,343 +0,0 @@
name: CI Workflow
on:
pull_request:
branches:
- alpha
- main
workflow_dispatch:
schedule:
# every sunday at 00:00
# - cron: '0 0 * * 0'
# every day at 00:00
- cron: '0 0 * * *'
push:
branches:
- '!main'
- '!alpha'
paths:
- '!.github'
env:
GO_VERSION: "1.25"
CODE_COVERAGE_FILE_NAME: "coverage.out" # must be the same as in Makefile
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
jobs:
config:
if: ${{ github.event_name != 'schedule' }}
name: Check GoReleaser config
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Check GoReleaser
uses: goreleaser/goreleaser-action@v7
with:
args: check
prepare:
name: Prepare GO cache
runs-on: ubuntu-latest
permissions:
actions: read # Required to identify workflow run.
checks: write # Required to add status summary.
contents: read # Required to checkout repository.
pull-requests: write # Required to add PR comment.
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install Go ${{ inputs.go-version }}
id: go-install
uses: actions/setup-go@v6
with:
# go-version: ${{ inputs.go-version }}
check-latest: true
go-version-file: 'go.mod'
- name: Determine GOMODCACHE
shell: bash
id: goenv
run: |
set -e
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
- name: Restore cached GO pkg
id: cache-gopkg
uses: actions/cache/restore@v5
with:
path: "${{ steps.goenv.outputs.gomodcache }}"
key: ${{ runner.os }}-gopkg
- name: Install go tools
if: steps.cache-gopkg.outputs.cache-hit != 'true'
run: |
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- name: Get all go packages
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash
run: |
set -e
go get ./...
- name: Save Cache
if: steps.cache-gopkg.outputs.cache-hit != 'true'
id: cache-gopkg-save
uses: actions/cache/save@v5
with:
path: |
${{ steps.goenv.outputs.gomodcache }}
key: ${{ runner.os }}-gopkg
publish_test:
name: "Test readiness for publishing provider"
needs:
- config
- prepare
runs-on: ubuntu-latest
permissions:
actions: read # Required to identify workflow run.
checks: write # Required to add status summary.
contents: read # Required to checkout repository.
pull-requests: write # Required to add PR comment.
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
- name: Checkout
uses: actions/checkout@v6
- name: Setup Go
uses: actions/setup-go@v6
with:
# go-version: ${{ env.GO_VERSION }}
check-latest: true
go-version-file: 'go.mod'
- name: Install go tools
run: |
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- name: Setup JAVA
uses: actions/setup-java@v5
with:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21'
# - name: Run build pkg directory
# run: |
# go run generator/main.go build
- name: Set up s3cfg
run: |
cat <<'EOF' >> ~/.s3cfg
[default]
host_base = https://object.storage.eu01.onstackit.cloud
host_bucket = https://%(bucket).object.storage.eu01.onstackit.cloud
check_ssl_certificate = False
access_key = ${{ secrets.S3_ACCESS_KEY }}
secret_key = ${{ secrets.S3_SECRET_KEY }}
EOF
- name: Import GPG key
run: |
echo "${{ secrets.PRIVATE_KEY_PEM }}" > ~/private.key.pem
gpg --import ~/private.key.pem
rm ~/private.key.pem
- name: Run GoReleaser with SNAPSHOT
id: goreleaser
env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
uses: goreleaser/goreleaser-action@v7
with:
args: release --skip publish --clean --snapshot
- name: Prepare key file
run: |
echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
- name: Prepare provider directory structure
run: |
VERSION=$(jq -r .version < dist/metadata.json)
go run generator/main.go \
publish \
--namespace=mhenselin \
--providerName=stackitprivatepreview \
--repoName=terraform-provider-stackitprivatepreview \
--domain=tfregistry.sysops.stackit.rocks \
--gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
--gpgPubKeyFile=public_key.pem \
--version=${VERSION}
testing:
name: CI run tests
runs-on: ubuntu-latest
needs:
- config
- prepare
env:
TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
TF_ACC_ORGANIZATION_ID: ${{ vars.TF_ACC_ORGANIZATION_ID }}
TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Build
uses: ./.github/actions/build
with:
go-version: ${{ env.GO_VERSION }}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: Create service account json file
if: ${{ github.event_name == 'pull_request' }}
run: |
echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/.service_account.json
- name: Run go mod tidy
if: ${{ github.event_name == 'pull_request' }}
run: go mod tidy
- name: Testing
run: |
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
export TF_ACC_SERVICE_ACCOUNT_FILE
make test
# - name: Acceptance Testing
# env:
# TF_ACC: "1"
# if: ${{ github.event_name == 'pull_request' }}
# run: |
# TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
# export TF_ACC_SERVICE_ACCOUNT_FILE
# make test-acceptance-tf
- name: Run Test
if: ${{ github.event_name == 'pull_request' }}
uses: ./.github/actions/acc_test
with:
go-version: ${{ env.GO_VERSION }}
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
region: ${{ vars.TF_ACC_REGION }}
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
# service_account_json_file_path: "~/service_account.json"
- name: Check coverage threshold
shell: bash
run: |
make coverage
COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
echo "Coverage: $COVERAGE%"
if (( $(echo "$COVERAGE < 80" | bc -l) )); then
echo "Coverage is below 80%"
# exit 1
fi
- name: Archive code coverage results
uses: actions/upload-artifact@v4
with:
name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
main:
if: ${{ github.event_name != 'schedule' }}
name: CI run build and linting
runs-on: ubuntu-latest
needs:
- config
- prepare
steps:
- name: Checkout
uses: actions/checkout@v6
# - uses: actions/cache@v5
# id: cache
# with:
# path: path/to/dependencies
# key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
# - name: Install Dependencies
# if: steps.cache.outputs.cache-hit != 'true'
# run: /install.sh
- name: Build
uses: ./.github/actions/build
with:
go-version: ${{ env.GO_VERSION }}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: "Ensure docs are up-to-date"
if: ${{ github.event_name == 'pull_request' }}
run: ./scripts/check-docs.sh
continue-on-error: true
- name: "Run go mod tidy"
if: ${{ github.event_name == 'pull_request' }}
run: go mod tidy
- name: golangci-lint
uses: golangci/golangci-lint-action@v9
with:
version: v2.10
args: --config=.golang-ci.yaml --allow-parallel-runners --timeout=5m
continue-on-error: true
- name: Linting terraform files
run: make lint-tf
continue-on-error: true
code_coverage:
name: "Code coverage report"
if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
runs-on: ubuntu-latest
needs:
- main
- prepare
permissions:
contents: read
actions: read # to download code coverage results from "main" job
pull-requests: write # write permission needed to comment on PR
steps:
- name: Install needed tools
shell: bash
run: |
set -e
apt-get -y -qq update
apt-get -y -qq install sudo
- name: Check new code coverage
uses: fgrosse/go-coverage-report@v1.2.0
continue-on-error: true # Add this line to prevent pipeline failures in forks
with:
coverage-artifact-name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
coverage-file-name: ${{ env.CODE_COVERAGE_FILE_NAME }}
root-package: 'github.com/stackitcloud/terraform-provider-stackit'

View file

@ -23,7 +23,7 @@ jobs:
uses: actions/checkout@v6 uses: actions/checkout@v6
- name: Check GoReleaser - name: Check GoReleaser
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: check args: check
@ -44,11 +44,9 @@ jobs:
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- name: Setup Go - name: Setup Go
uses: https://code.forgejo.org/actions/setup-go@v6 uses: actions/setup-go@v6
with: with:
# go-version: ${{ env.GO_VERSION }} go-version: ${{ env.GO_VERSION }}
check-latest: true
go-version-file: 'go.mod'
- name: Install go tools - name: Install go tools
run: | run: |
@ -70,7 +68,7 @@ jobs:
set -e set -e
mkdir -p generated/services mkdir -p generated/services
mkdir -p generated/internal/services mkdir -p generated/internal/services
go run generator/main.go build go run cmd/main.go build
- name: Set up s3cfg - name: Set up s3cfg
run: | run: |
@ -95,7 +93,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }} GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }} GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: release --skip publish --clean --snapshot args: release --skip publish --clean --snapshot
@ -105,7 +103,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }} GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }} GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: release --skip publish --clean args: release --skip publish --clean
@ -116,7 +114,7 @@ jobs:
- name: Prepare provider directory structure - name: Prepare provider directory structure
run: | run: |
VERSION=$(jq -r .version < dist/metadata.json) VERSION=$(jq -r .version < dist/metadata.json)
go run generator/main.go \ go run cmd/main.go \
publish \ publish \
--namespace=mhenselin \ --namespace=mhenselin \
--providerName=stackitprivatepreview \ --providerName=stackitprivatepreview \

View file

@ -22,19 +22,17 @@ jobs:
with: with:
# Allow goreleaser to access older tag information. # Allow goreleaser to access older tag information.
fetch-depth: 0 fetch-depth: 0
- uses: actions/setup-go@v5
- uses: https://code.forgejo.org/actions/setup-go@v6
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
cache: true cache: true
- name: Import GPG key - name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6 uses: crazy-max/ghaction-import-gpg@v6
id: import_gpg id: import_gpg
with: with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
- name: Run GoReleaser - name: Run GoReleaser
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: release --clean args: release --clean
env: env:

View file

@ -1,29 +0,0 @@
name: Runner stats
on:
workflow_dispatch:
jobs:
stats-own:
name: "Get own runner stats"
runs-on: ubuntu-latest
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install inxi
- name: Show stats
run: inxi -c 0
stats-stackit:
name: "Get STACKIT runner stats"
runs-on: stackit-docker
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install inxi
- name: Show stats
run: inxi -c 0

View file

@ -18,12 +18,6 @@ jobs:
uses: ./.github/actions/acc_test uses: ./.github/actions/acc_test
with: with:
go-version: ${{ env.GO_VERSION }} go-version: ${{ env.GO_VERSION }}
project_id: ${{ vars.TF_ACC_PROJECT_ID }} project_id: ${{ vars.TEST_PROJECT_ID }}
region: 'eu01' region: 'eu01'
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}" service_account_json: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
# service_account_json_file_path: "~/service_account.json"

1
.gitignore vendored
View file

@ -40,7 +40,6 @@ coverage.out
coverage.html coverage.html
generated generated
stackit-sdk-generator stackit-sdk-generator
stackit-sdk-generator/**
dist dist
.secrets .secrets

View file

@ -1,94 +0,0 @@
version: "2"
run:
concurrency: 4
output:
formats:
text:
print-linter-name: true
print-issued-lines: true
colors: true
path: stdout
linters:
enable:
- bodyclose
- depguard
- errorlint
- forcetypeassert
- gochecknoinits
- gocritic
- gosec
- misspell
- nakedret
- revive
- sqlclosecheck
- wastedassign
disable:
- noctx
- unparam
settings:
depguard:
rules:
main:
list-mode: lax
allow:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
- github.com/hashicorp/terraform-plugin-framework
- github.com/hashicorp/terraform-plugin-log
- github.com/stackitcloud/stackit-sdk-go
deny:
- pkg: github.com/stretchr/testify
desc: Do not use a testing framework
gocritic:
disabled-checks:
- wrapperFunc
- typeDefFirst
- ifElseChain
- dupImport
- hugeParam
enabled-tags:
- performance
- style
- experimental
gosec:
excludes:
- G104
- G102
- G304
- G307
misspell:
locale: US
nakedret:
max-func-lines: 0
revive:
severity: error
rules:
- name: errorf
- name: context-as-argument
- name: error-return
- name: increment-decrement
- name: indent-error-flow
- name: superfluous-else
- name: unused-parameter
- name: unreachable-code
- name: atomic
- name: empty-lines
- name: early-return
exclusions:
paths:
- generator/
generated: lax
warn-unused: true
# Excluding configuration per-path, per-linter, per-text and per-source.
rules:
# Exclude some linters from running on tests files.
- path: _test\.go
linters:
- gochecknoinits
formatters:
enable:
#- gofmt
- goimports
settings:
goimports:
local-prefixes:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview

View file

@ -12,20 +12,17 @@ project-tools:
# LINT # LINT
lint-golangci-lint: lint-golangci-lint:
@echo "Linting with golangci-lint" @echo "Linting with golangci-lint"
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config .golang-ci.yaml @$(SCRIPTS_BASE)/lint-golangci-lint.sh
lint-tf: lint-tf:
@echo "Linting terraform files" @echo "Linting terraform files"
@terraform fmt -check -diff -recursive examples/ @terraform fmt -check -diff -recursive
@terraform fmt -check -diff -recursive stackit/
lint: lint-golangci-lint lint-tf lint: lint-golangci-lint lint-tf
# DOCUMENTATION GENERATION # DOCUMENTATION GENERATION
generate-docs: generate-docs:
@echo "Generating documentation with tfplugindocs" @echo "Generating documentation with tfplugindocs"
@$(SCRIPTS_BASE)/tfplugindocs.sh @$(SCRIPTS_BASE)/tfplugindocs.sh
build: build:

956
cmd/cmd/build/build.go Normal file
View file

@ -0,0 +1,956 @@
package build
import (
"bufio"
"bytes"
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
"io"
"log"
"log/slog"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"strconv"
"strings"
"text/template"
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
"github.com/ldez/go-git-cmd-wrapper/v2/git"
)
const (
OAS_REPO_NAME = "stackit-api-specifications"
OAS_REPO = "https://github.com/stackitcloud/stackit-api-specifications.git"
GEN_REPO_NAME = "stackit-sdk-generator"
GEN_REPO = "https://github.com/stackitcloud/stackit-sdk-generator.git"
)
type version struct {
verString string
major int
minor int
}
type Builder struct {
SkipClone bool
SkipCleanup bool
PackagesOnly bool
}
// Build runs the full generation pipeline:
//  1. clone the API-specifications and SDK-generator repos,
//  2. copy the latest OAS file per service into the generator,
//  3. run `make generate-go-sdk` and move the resulting packages to pkg_gen,
//  4. unless PackagesOnly is set, scaffold the terraform provider service
//     files from the generated schemas,
//  5. unless SkipCleanup is set, remove all temporary directories.
//
// It returns the first error encountered instead of terminating the process.
func (b *Builder) Build() error {
	slog.Info("Starting Builder")
	if b.PackagesOnly {
		slog.Info(" >>> only generating pkg_gen <<<")
	}
	root, err := getRoot()
	if err != nil {
		// Previously log.Fatal(err): return the error so callers decide.
		return err
	}
	if root == nil || *root == "" {
		return fmt.Errorf("unable to determine root directory from git")
	}
	slog.Info(" ... using root directory", "dir", *root)
	if !b.PackagesOnly {
		slog.Info(" ... Checking needed commands available")
		if err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"}); err != nil {
			return err
		}
	}
	if !b.SkipCleanup {
		// The original code removed pkg_gen twice (once unconditionally, once
		// guarded by !PackagesOnly); a single removal is sufficient.
		slog.Info("Cleaning up old packages directory")
		if err := os.RemoveAll(path.Join(*root, "pkg_gen")); err != nil {
			return err
		}
	}
	slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
	genDir := path.Join(*root, GEN_REPO_NAME)
	if !b.SkipClone {
		if err := createGeneratorDir(GEN_REPO, genDir, b.SkipClone); err != nil {
			return err
		}
	}
	slog.Info("Creating oas repo dir", "dir", fmt.Sprintf("%s/%s", *root, OAS_REPO_NAME))
	repoDir, err := createRepoDir(genDir, OAS_REPO, OAS_REPO_NAME, b.SkipClone)
	if err != nil {
		return fmt.Errorf("creating oas repo dir: %w", err)
	}
	slog.Info("Retrieving versions from subdirs")
	// TODO - major
	verMap, err := getVersions(repoDir)
	if err != nil {
		return fmt.Errorf("retrieving versions: %w", err)
	}
	slog.Info("Reducing to only latest or highest")
	res, err := getOnlyLatest(verMap)
	if err != nil {
		return fmt.Errorf("reducing versions: %w", err)
	}
	slog.Info("Creating OAS dir")
	if err := os.MkdirAll(path.Join(genDir, "oas"), 0755); err != nil {
		return err
	}
	slog.Info("Copying OAS files")
	for service, item := range res {
		// The map key is service+channel (e.g. "skealpha"); the repo layout is
		// services/<service>/v<major><channel><minor>/<service>.json.
		baseService := strings.TrimSuffix(service, "alpha")
		baseService = strings.TrimSuffix(baseService, "beta")
		itemVersion := fmt.Sprintf("v%d%s", item.major, item.verString)
		if item.minor != 0 {
			itemVersion += strconv.Itoa(item.minor)
		}
		srcFile := path.Join(
			repoDir,
			"services",
			baseService,
			itemVersion,
			fmt.Sprintf("%s.json", baseService),
		)
		dstFile := path.Join(genDir, "oas", fmt.Sprintf("%s.json", service))
		if _, err := copyFile(srcFile, dstFile); err != nil {
			return fmt.Errorf("copying %s: %w", srcFile, err)
		}
	}
	slog.Info("Changing dir", "dir", genDir)
	if err := os.Chdir(genDir); err != nil {
		return err
	}
	slog.Info("Calling make", "command", "generate-go-sdk")
	cmd := exec.Command("make", "generate-go-sdk")
	var stdOut, stdErr bytes.Buffer
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err := cmd.Run(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			slog.Error("make generate-go-sdk", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
			return fmt.Errorf("%s", stdErr.String())
		}
		slog.Error("make generate-go-sdk", "err", err)
		return err
	}
	slog.Info("Cleaning up go.mod and go.sum files")
	cleanDir := path.Join(genDir, "sdk-repo-updated", "services")
	dirEntries, err := os.ReadDir(cleanDir)
	if err != nil {
		return err
	}
	for _, entry := range dirEntries {
		if !entry.IsDir() {
			continue
		}
		if err := deleteFiles(
			path.Join(cleanDir, entry.Name(), "go.mod"),
			path.Join(cleanDir, entry.Name(), "go.sum"),
		); err != nil {
			return err
		}
	}
	slog.Info("Changing dir", "dir", *root)
	if err := os.Chdir(*root); err != nil {
		return err
	}
	slog.Info("Rearranging package directories")
	if err := os.MkdirAll(path.Join(*root, "pkg_gen"), 0755); err != nil { // noqa:gosec
		return err
	}
	srcDir := path.Join(genDir, "sdk-repo-updated", "services")
	items, err := os.ReadDir(srcDir)
	if err != nil {
		return err
	}
	for _, item := range items {
		if !item.IsDir() {
			continue
		}
		slog.Info(" -> package", "name", item.Name())
		tgtDir := path.Join(*root, "pkg_gen", item.Name())
		if fileExists(tgtDir) {
			if delErr := os.RemoveAll(tgtDir); delErr != nil {
				return delErr
			}
		}
		if err := os.Rename(path.Join(srcDir, item.Name()), tgtDir); err != nil {
			return err
		}
	}
	if !b.PackagesOnly {
		slog.Info("Generating service boilerplate")
		if err := generateServiceFiles(*root, genDir); err != nil {
			return err
		}
		slog.Info("Copying all service files")
		if err := CopyDirectory(
			path.Join(*root, "generated", "internal", "services"),
			path.Join(*root, "stackit", "internal", "services"),
		); err != nil {
			return err
		}
		if err := createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services")); err != nil {
			return err
		}
	}
	if !b.SkipCleanup {
		slog.Info("Finally removing temporary files and directories")
		for _, dir := range []string{
			path.Join(*root, "generated"),
			path.Join(*root, GEN_REPO_NAME),
		} {
			if err := os.RemoveAll(dir); err != nil {
				slog.Error("RemoveAll", "dir", dir, "err", err)
				return err
			}
		}
		// NOTE(review): repoDir lives under GEN_REPO_NAME, so this final
		// removal is likely already covered by the loop above; kept for parity.
		slog.Info("Cleaning up", "dir", repoDir)
		if err := os.RemoveAll(filepath.Dir(repoDir)); err != nil {
			return fmt.Errorf("cleaning up %s: %w", repoDir, err)
		}
	}
	slog.Info("Done")
	return nil
}
// templateData is the payload handed to the scaffold templates
// (data_source_scaffold.gotmpl, resource_scaffold.gotmpl,
// functions_scaffold.gotmpl) by writeTemplateToFile.
type templateData struct {
	PackageName       string   // package name exactly as the directory is named
	PackageNameCamel  string   // package name in camelCase
	PackageNamePascal string   // package name in PascalCase
	NameCamel         string   // resource name in camelCase
	NamePascal        string   // resource name in PascalCase
	NameSnake         string   // resource name in snake_case (directory name)
	Fields            []string // model field names extracted via getTokens
}
func fileExists(path string) bool {
_, err := os.Stat(path)
if os.IsNotExist(err) {
return false
}
if err != nil {
panic(err)
}
return true
}
// createBoilerplate walks the generated services tree under folder and writes
// any missing scaffold files (datasource.go, resource.go, functions.go) from
// the templates in <rootFolder>/cmd/cmd/build/templates.
//
// rootFolder is the repository root; folder is the services directory that was
// populated by the generator. The first error encountered is returned.
func createBoilerplate(rootFolder, folder string) error {
	services, err := os.ReadDir(folder)
	if err != nil {
		return err
	}
	templateDir := path.Join(rootFolder, "cmd", "cmd", "build", "templates")
	for _, svc := range services {
		if !svc.IsDir() {
			continue
		}
		resources, err := os.ReadDir(path.Join(folder, svc.Name()))
		if err != nil {
			return err
		}
		for _, res := range resources {
			if !res.IsDir() {
				continue
			}
			resourceName := res.Name()
			resDir := path.Join(folder, svc.Name(), resourceName)
			dsGenFile := path.Join(resDir, "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", resourceName))
			resGenFile := path.Join(resDir, "resources_gen", fmt.Sprintf("%s_resource_gen.go", resourceName))
			dsGoFile := path.Join(resDir, "datasource.go")
			resGoFile := path.Join(resDir, "resource.go")

			// Scaffold datasource.go only when a generated schema exists but
			// the hand-written wrapper does not.
			if fileExists(dsGenFile) && !fileExists(dsGoFile) {
				slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
				if !ValidateSnakeCase(resourceName) {
					return errors.New("resource name is invalid")
				}
				fields, tokenErr := getTokens(dsGenFile)
				if tokenErr != nil {
					return fmt.Errorf("error reading tokens: %w", tokenErr)
				}
				tplName := "data_source_scaffold.gotmpl"
				// BUG FIX: this branch previously panicked on a template error
				// while the resource branch returned it; both now return.
				if err := writeTemplateToFile(
					tplName,
					path.Join(templateDir, tplName),
					dsGoFile,
					&templateData{
						PackageName:       svc.Name(),
						PackageNameCamel:  ToCamelCase(svc.Name()),
						PackageNamePascal: ToPascalCase(svc.Name()),
						NameCamel:         ToCamelCase(resourceName),
						NamePascal:        ToPascalCase(resourceName),
						NameSnake:         resourceName,
						Fields:            fields,
					},
				); err != nil {
					return err
				}
			}

			// Scaffold resource.go (and, with it, functions.go).
			if fileExists(resGenFile) && !fileExists(resGoFile) {
				slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
				if !ValidateSnakeCase(resourceName) {
					return errors.New("resource name is invalid")
				}
				fields, tokenErr := getTokens(resGenFile)
				if tokenErr != nil {
					return fmt.Errorf("error reading tokens: %w", tokenErr)
				}
				tplName := "resource_scaffold.gotmpl"
				if err := writeTemplateToFile(
					tplName,
					path.Join(templateDir, tplName),
					resGoFile,
					&templateData{
						PackageName:       svc.Name(),
						PackageNameCamel:  ToCamelCase(svc.Name()),
						PackageNamePascal: ToPascalCase(svc.Name()),
						NameCamel:         ToCamelCase(resourceName),
						NamePascal:        ToPascalCase(resourceName),
						NameSnake:         resourceName,
						Fields:            fields,
					},
				); err != nil {
					return err
				}
				// NOTE: functions.go is only scaffolded together with a new
				// resource.go, mirroring the original control flow.
				fncFile := path.Join(resDir, "functions.go")
				if !fileExists(fncFile) {
					slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
					fncTplName := "functions_scaffold.gotmpl"
					if err := writeTemplateToFile(
						fncTplName,
						path.Join(templateDir, fncTplName),
						fncFile,
						&templateData{
							PackageName:       svc.Name(),
							PackageNameCamel:  ToCamelCase(svc.Name()),
							PackageNamePascal: ToPascalCase(svc.Name()),
							NameCamel:         ToCamelCase(resourceName),
							NamePascal:        ToPascalCase(resourceName),
							NameSnake:         resourceName,
						},
					); err != nil {
						return err
					}
				}
			}
		}
	}
	return nil
}
// ucfirst upper-cases the first rune of s, leaving the rest untouched.
// Used as the "ucfirst" template function in writeTemplateToFile.
//
// BUG FIX: the previous implementation sliced the first *byte* (s[:1]),
// which corrupts a multi-byte UTF-8 first rune; this version is rune-aware
// and behaves identically for ASCII input.
func ucfirst(s string) string {
	if s == "" {
		return ""
	}
	r, size := utf8.DecodeRuneInString(s)
	return string(unicode.ToUpper(r)) + s[size:]
}
// writeTemplateToFile renders the template tplName from tplFile into outFile,
// creating (or truncating) outFile. The template has access to the "ucfirst"
// helper function.
//
// BUG FIX: the file handle used to leak when Execute failed (early return
// before Close); the file is now always closed, and a close error is
// surfaced because buffered writes can fail at close time.
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
	funcs := template.FuncMap{
		"ucfirst": ucfirst,
	}
	tmpl, err := template.New(tplName).Funcs(funcs).ParseFiles(tplFile)
	if err != nil {
		return err
	}
	f, err := os.Create(outFile)
	if err != nil {
		return err
	}
	execErr := tmpl.Execute(f, *data)
	closeErr := f.Close()
	if execErr != nil {
		return execErr
	}
	return closeErr
}
// runGenTool runs an external generator command, capturing stdout/stderr, and
// returns an error containing stderr when the tool exits non-zero. Unexpected
// stdout output is logged as a warning.
func runGenTool(name string, args ...string) error {
	var stdOut, stdErr bytes.Buffer
	cmd := exec.Command(name, args...) // noqa:gosec
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err := cmd.Run(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			slog.Error(name, "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
			return fmt.Errorf("%s", stdErr.String())
		}
		slog.Error(name, "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
		return err
	}
	if stdOut.Len() > 0 {
		slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
	}
	return nil
}

// generateServiceFiles scans <rootDir>/service_specs/<service>/<alpha|beta>
// for *_config.yml files and, for each one with a matching OAS file in
// <generatorDir>/oas, runs tfplugingen-openapi and tfplugingen-framework to
// produce the provider spec JSON plus resources_gen/datasources_gen packages
// under <rootDir>/generated/internal/services.
//
// BUG FIXES over the original: the three exec invocations shared output
// buffers without resetting them, and the data-sources error path logged and
// returned the *previous* command's stderr instead of its own. Both are fixed
// by routing all invocations through runGenTool with fresh buffers. The
// config-file regex is also compiled once instead of per spec file.
func generateServiceFiles(rootDir, generatorDir string) error {
	// slog.Info("Generating specs folder")
	if err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755); err != nil {
		return err
	}
	services, err := os.ReadDir(path.Join(rootDir, "service_specs"))
	if err != nil {
		return err
	}
	configRe := regexp.MustCompile(`^(.*)_config.yml$`)
	for _, service := range services {
		if !service.IsDir() {
			continue
		}
		versions, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name()))
		if err != nil {
			return err
		}
		for _, svcVersion := range versions {
			if !svcVersion.IsDir() {
				continue
			}
			// TODO: use const of supported versions
			if svcVersion.Name() != "alpha" && svcVersion.Name() != "beta" {
				continue
			}
			specFiles, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name()))
			if err != nil {
				return err
			}
			for _, specFile := range specFiles {
				if specFile.IsDir() {
					continue
				}
				matches := configRe.FindAllStringSubmatch(specFile.Name(), -1)
				if matches == nil {
					continue
				}
				fileName := matches[0][0]
				resource := matches[0][1]
				slog.Info(
					" found service spec",
					"name",
					specFile.Name(),
					"service",
					service.Name(),
					"resource",
					resource,
				)
				oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()))
				if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
					slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
					continue
				}
				// e.g. "postgres-flex" + "alpha" -> "postgresflexalpha"
				scName := strings.ReplaceAll(fmt.Sprintf("%s%s", service.Name(), svcVersion.Name()), "-", "")
				if err := os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755); err != nil {
					return err
				}
				specJsonFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource))
				if err := runGenTool(
					"tfplugingen-openapi",
					"generate",
					"--config",
					path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
					"--output",
					specJsonFile,
					oasFile,
				); err != nil {
					return err
				}
				resFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen")
				if err := os.MkdirAll(resFolder, 0755); err != nil {
					return err
				}
				if err := runGenTool(
					"tfplugingen-framework",
					"generate",
					"resources",
					"--input",
					specJsonFile,
					"--output",
					resFolder,
					"--package",
					scName,
				); err != nil {
					return err
				}
				dsFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen")
				if err := os.MkdirAll(dsFolder, 0755); err != nil {
					return err
				}
				if err := runGenTool(
					"tfplugingen-framework",
					"generate",
					"data-sources",
					"--input",
					specJsonFile,
					"--output",
					dsFolder,
					"--package",
					scName,
				); err != nil {
					return err
				}
				if tfAnoErr := handleTfTagForDatasourceFile(
					path.Join(dsFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
					scName,
					resource,
				); tfAnoErr != nil {
					return tfAnoErr
				}
			}
		}
	}
	return nil
}
// handleTfTagForDatasourceFile rewrites the generated datasource file at
// filePath line by line, replacing the "id" schema attribute and tfsdk tag
// with "tf_original_api_id" (see handleLine; the original comment said
// "stf_original_api_id", which does not match the code). service and resource
// are only used for logging. A missing file is skipped with a warning, not an
// error. The rewrite goes through a temp file that is renamed over the
// original.
func handleTfTagForDatasourceFile(filePath, service, resource string) error {
	slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
	if !fileExists(filePath) {
		slog.Warn(" could not find file, skipping", "path", filePath)
		return nil
	}
	f, err := os.Open(filePath)
	if err != nil {
		return err
	}
	// NOTE(review): f is also closed explicitly below; this deferred second
	// Close returns an error that is deliberately ignored.
	defer f.Close()
	root, err := getRoot()
	if err != nil {
		// NOTE(review): log.Fatal exits the whole process; returning the error
		// would let callers handle it like every other failure here.
		log.Fatal(err)
	}
	// NOTE(review): the temp file is created in the repo root rather than next
	// to filePath, and is not removed on the error paths below — consider
	// cleanup on failure.
	tmp, err := os.CreateTemp(*root, "replace-*")
	if err != nil {
		return err
	}
	defer tmp.Close()
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		// handleLine returns the line unchanged unless it matches one of the
		// two "id" patterns.
		resLine, err := handleLine(sc.Text())
		if err != nil {
			return err
		}
		if _, err := tmp.WriteString(resLine + "\n"); err != nil {
			return err
		}
	}
	if scErr := sc.Err(); scErr != nil {
		return scErr
	}
	// Close both files before the rename so all writes are flushed.
	if err := tmp.Close(); err != nil {
		return err
	}
	if err := f.Close(); err != nil {
		return err
	}
	if err := os.Rename(tmp.Name(), filePath); err != nil {
		// NOTE(review): log.Fatal here too — see above.
		log.Fatal(err)
	}
	return nil
}
// Package-level so the patterns are compiled once instead of on every line
// of every rewritten file (handleLine runs per line via bufio.Scanner).
var (
	// matches: <ws>"id": schema.XxxAttribute{
	dsSchemaIdRe = regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
	// matches: <ws>Id types.Xxx `tfsdk:"id"`
	dsModelIdRe = regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)
)

// handleLine returns line with an "id" schema attribute name or tfsdk tag
// replaced by "tf_original_api_id"; non-matching lines are returned verbatim.
//
// NOTE(review): on a match only the matched region is reconstructed — any
// text outside the regex match is dropped. For the generated files these
// patterns cover the whole line, so this is safe there.
func handleLine(line string) (string, error) {
	if m := dsSchemaIdRe.FindAllStringSubmatch(line, -1); m != nil {
		return fmt.Sprintf("%stf_original_api_id%s", m[0][1], m[0][3]), nil
	}
	if m := dsModelIdRe.FindAllStringSubmatch(line, -1); m != nil {
		return fmt.Sprintf("%stf_original_api_id%s", m[0][1], m[0][3]), nil
	}
	return line, nil
}
// checkCommands verifies that every named executable resolves on PATH,
// returning an error for the first one that is missing. Each command found is
// logged before the next is checked.
func checkCommands(commands []string) error {
	for _, name := range commands {
		if !commandExists(name) {
			return fmt.Errorf("missing command %s", name)
		}
		slog.Info(" found", "command", name)
	}
	return nil
}
// commandExists reports whether cmd resolves to an executable on PATH.
func commandExists(cmd string) bool {
	if _, lookErr := exec.LookPath(cmd); lookErr != nil {
		return false
	}
	return true
}
func deleteFiles(fNames ...string) error {
for _, fName := range fNames {
if _, err := os.Stat(fName); !os.IsNotExist(err) {
err = os.Remove(fName)
if err != nil {
return err
}
}
}
return nil
}
func copyFile(src, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer source.Close()
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer destination.Close()
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
// getOnlyLatest is intended to reduce a service→version map to the highest
// minor version per key.
//
// NOTE(review): as written this is effectively a shallow copy of m — the keys
// of an input map are already unique, so the `ok` branch (and therefore the
// minor-version comparison) can never execute. The reduction actually has to
// happen where duplicate keys are collapsed, i.e. in extractVersions. Kept
// byte-identical to preserve behavior.
func getOnlyLatest(m map[string]version) (map[string]version, error) {
	tmpMap := make(map[string]version)
	for k, v := range m {
		item, ok := tmpMap[k]
		if !ok {
			tmpMap[k] = v
		} else {
			// unreachable: tmpMap cannot already contain k on first insert
			if item.major == v.major && item.minor < v.minor {
				tmpMap[k] = v
			}
		}
	}
	return tmpMap, nil
}
// getVersions scans <dir>/services/<service>/<versionDir> and returns a map
// from service+channel (e.g. "skebeta") to the parsed version of each
// matching version directory.
func getVersions(dir string) (map[string]version, error) {
	out := make(map[string]version)
	serviceEntries, err := os.ReadDir(path.Join(dir, "services"))
	if err != nil {
		return nil, err
	}
	for _, svcEntry := range serviceEntries {
		if !svcEntry.IsDir() {
			continue
		}
		versionDirs, readErr := os.ReadDir(path.Join(dir, "services", svcEntry.Name()))
		if readErr != nil {
			return nil, readErr
		}
		extracted, exErr := extractVersions(svcEntry.Name(), versionDirs)
		if exErr != nil {
			return extracted, exErr
		}
		for key, val := range extracted {
			out[key] = val
		}
	}
	return out, nil
}
// extractVersions parses version directory names (v<major><channel><minor>,
// e.g. "v1beta2") for one service and returns a map keyed by service+channel.
//
// BUG FIX: when several directories map to the same key (e.g. v1beta and
// v1beta2), the original kept whichever ReadDir returned last — lexical
// order, so "v1beta2" would wrongly beat "v1beta10". Now the highest
// major/minor version wins, which is the reduction getOnlyLatest was
// designed for but cannot perform on an already-collapsed map.
func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
	versionDirRe := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`)
	res := make(map[string]version)
	for _, vDir := range versionDirs {
		if !vDir.IsDir() {
			continue
		}
		matches := versionDirRe.FindAllStringSubmatch(vDir.Name(), -1)
		if matches == nil {
			continue
		}
		svc, ver, err := handleVersion(service, matches[0])
		if err != nil {
			return nil, err
		}
		if svc == nil || ver == nil {
			continue
		}
		if existing, ok := res[*svc]; ok {
			// Keep the entry with the higher major, then higher minor.
			if existing.major > ver.major ||
				(existing.major == ver.major && existing.minor >= ver.minor) {
				continue
			}
		}
		res[*svc] = *ver
	}
	return res, nil
}
// handleVersion converts one regex match of a version directory name into the
// map key service+channel and a parsed version value. match is expected in
// the shape produced by `v([0-9]+)([a-z]+)([0-9]*)`:
// [full, major, channel, minor]; "v1beta2" yields key "<service>beta" and
// version{major: 1, verString: "beta", minor: 2}. A missing minor defaults
// to 0. Only "alpha" and "beta" channels are supported.
//
// FIXES: the stray fmt.Println debug output on stdout is gone, and the
// caller's match slice is no longer mutated (the original wrote match[3]).
func handleVersion(service string, match []string) (*string, *version, error) {
	if match == nil {
		// Defensive: current callers only pass non-nil matches.
		return nil, nil, nil
	}
	channel := match[2]
	if channel != "alpha" && channel != "beta" {
		return nil, nil, errors.New("unsupported version")
	}
	major, err := strconv.Atoi(match[1])
	if err != nil {
		return nil, nil, err
	}
	minorStr := match[3]
	if minorStr == "" {
		minorStr = "0"
	}
	minor, err := strconv.Atoi(minorStr)
	if err != nil {
		return nil, nil, err
	}
	key := fmt.Sprintf("%s%s", service, channel)
	return &key, &version{verString: channel, major: major, minor: minor}, nil
}
// createRepoDir clones repoUrl into <root>/<repoName> and returns that path.
// When skipClone is set, or the directory already exists (logged as a
// warning), no clone is performed and the path is returned as-is.
func createRepoDir(root, repoUrl, repoName string, skipClone bool) (string, error) {
	targetDir := path.Join(root, repoName)
	if skipClone {
		return targetDir, nil
	}
	if fileExists(targetDir) {
		slog.Warn("target dir exists - skipping", "targetDir", targetDir)
		return targetDir, nil
	}
	if _, err := git.Clone(
		clone.Repository(repoUrl),
		clone.Directory(targetDir),
	); err != nil {
		return "", err
	}
	return targetDir, nil
}
// createGeneratorDir clones repoUrl into targetDir, removing any previous
// checkout first. It is a no-op when skipClone is set.
func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
	if skipClone {
		return nil
	}
	if fileExists(targetDir) {
		if remErr := os.RemoveAll(targetDir); remErr != nil {
			return remErr
		}
	}
	_, cloneErr := git.Clone(
		clone.Repository(repoUrl),
		clone.Directory(targetDir),
	)
	return cloneErr
}
// getRoot returns the repository root as reported by
// `git rev-parse --show-toplevel`, or an error when git fails (e.g. not
// inside a work tree).
//
// FIX: the previous newline-split kept a trailing "\r" when git emits CRLF
// and returned "" for empty output; TrimSpace handles both and is otherwise
// equivalent for the usual "path\n" output.
func getRoot() (*string, error) {
	out, err := exec.Command("git", "rev-parse", "--show-toplevel").Output()
	if err != nil {
		return nil, err
	}
	root := strings.TrimSpace(string(out))
	return &root, nil
}
func getTokens(fileName string) ([]string, error) {
fset := token.NewFileSet()
var result []string
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
if err != nil {
return nil, err
}
ast.Inspect(node, func(n ast.Node) bool {
// Suche nach Typ-Deklarationen (structs)
ts, ok := n.(*ast.TypeSpec)
if ok {
if strings.Contains(ts.Name.Name, "Model") {
// fmt.Printf("found model: %s\n", ts.Name.Name)
ast.Inspect(ts, func(sn ast.Node) bool {
tts, tok := sn.(*ast.Field)
if tok {
// fmt.Printf(" found: %+v\n", tts.Names[0])
// spew.Dump(tts.Type)
result = append(result, tts.Names[0].String())
// fld, fldOk := tts.Type.(*ast.Ident)
//if fldOk {
// fmt.Printf("type: %+v\n", fld)
//}
}
return true
})
}
}
return true
})
return result, nil
}

View file

@ -3,7 +3,6 @@ package build
import ( import (
"fmt" "fmt"
"io" "io"
"log/slog"
"os" "os"
"path/filepath" "path/filepath"
"syscall" "syscall"
@ -75,24 +74,14 @@ func Copy(srcFile, dstFile string) error {
return err return err
} }
defer func(out *os.File) { defer out.Close()
err := out.Close()
if err != nil {
slog.Error("failed to close file", slog.Any("err", err))
}
}(out)
in, err := os.Open(srcFile) in, err := os.Open(srcFile)
if err != nil { if err != nil {
return err return err
} }
defer func(in *os.File) { defer in.Close()
err := in.Close()
if err != nil {
slog.Error("error closing destination file", slog.Any("err", err))
}
}(in)
_, err = io.Copy(out, in) _, err = io.Copy(out, in)
if err != nil { if err != nil {

View file

@ -3,28 +3,24 @@ package cmd
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/build" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/build"
) )
var ( var (
skipCleanup bool skipCleanup bool
skipClone bool skipClone bool
packagesOnly bool packagesOnly bool
verbose bool
debug bool
) )
var buildCmd = &cobra.Command{ var buildCmd = &cobra.Command{
Use: "build", Use: "build",
Short: "Build the necessary boilerplate", Short: "Build the necessary boilerplate",
Long: `...`, Long: `...`,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, args []string) error {
b := build.Builder{ b := build.Builder{
SkipClone: skipClone, SkipClone: skipClone,
SkipCleanup: skipCleanup, SkipCleanup: skipCleanup,
PackagesOnly: packagesOnly, PackagesOnly: packagesOnly,
Verbose: verbose,
Debug: debug,
} }
return b.Build() return b.Build()
}, },
@ -34,10 +30,8 @@ func NewBuildCmd() *cobra.Command {
return buildCmd return buildCmd
} }
func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands func init() { // nolint: gochecknoinits
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps") buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
buildCmd.Flags().BoolVarP(&debug, "debug", "d", false, "Enable debug output")
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git") buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages") buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
buildCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "verbose - show more logs")
} }

View file

@ -12,7 +12,8 @@ var examplesCmd = &cobra.Command{
Use: "examples", Use: "examples",
Short: "create examples", Short: "create examples",
Long: `...`, Long: `...`,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, args []string) error {
//filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go" //filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
// //
//src, err := os.ReadFile(filePathStr) //src, err := os.ReadFile(filePathStr)

View file

@ -24,7 +24,7 @@ var getFieldsCmd = &cobra.Command{
Use: "get-fields", Use: "get-fields",
Short: "get fields from file", Short: "get fields from file",
Long: `...`, Long: `...`,
PreRunE: func(_ *cobra.Command, _ []string) error { PreRunE: func(cmd *cobra.Command, args []string) error {
typeStr := "data_source" typeStr := "data_source"
if resType != "resource" && resType != "datasource" { if resType != "resource" && resType != "datasource" {
return fmt.Errorf("--type can only be resource or datasource") return fmt.Errorf("--type can only be resource or datasource")
@ -82,7 +82,7 @@ var getFieldsCmd = &cobra.Command{
//} //}
return nil return nil
}, },
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return getFields(filePath) return getFields(filePath)
}, },
} }
@ -107,26 +107,31 @@ func getTokens(fileName string) ([]string, error) {
return nil, err return nil, err
} }
ast.Inspect( ast.Inspect(node, func(n ast.Node) bool {
node, func(n ast.Node) bool {
// Suche nach Typ-Deklarationen (structs) // Suche nach Typ-Deklarationen (structs)
ts, ok := n.(*ast.TypeSpec) ts, ok := n.(*ast.TypeSpec)
if ok { if ok {
if strings.Contains(ts.Name.Name, "Model") { if strings.Contains(ts.Name.Name, "Model") {
ast.Inspect( // fmt.Printf("found model: %s\n", ts.Name.Name)
ts, func(sn ast.Node) bool { ast.Inspect(ts, func(sn ast.Node) bool {
tts, tok := sn.(*ast.Field) tts, tok := sn.(*ast.Field)
if tok { if tok {
// fmt.Printf(" found: %+v\n", tts.Names[0])
// spew.Dump(tts.Type)
result = append(result, tts.Names[0].String()) result = append(result, tts.Names[0].String())
// fld, fldOk := tts.Type.(*ast.Ident)
//if fldOk {
// fmt.Printf("type: %+v\n", fld)
//}
} }
return true return true
}, })
)
} }
} }
return true return true
}, })
)
return result, nil return result, nil
} }
@ -134,15 +139,9 @@ func NewGetFieldsCmd() *cobra.Command {
return getFieldsCmd return getFieldsCmd
} }
func init() { //nolint:gochecknoinits //this is the only way to add the command to the rootCmd func init() { // nolint: gochecknoinits
getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path") getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path")
getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name") getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name")
getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name") getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name")
getFieldsCmd.Flags().StringVarP( getFieldsCmd.Flags().StringVarP(&resType, "type", "t", "resource", "resource type (data-source or resource [default])")
&resType,
"type",
"t",
"resource",
"resource type (data-source or resource [default])",
)
} }

View file

@ -35,27 +35,36 @@ type GpgPublicKey struct {
} }
func (p *Provider) CreateArchitectureFiles() error { func (p *Provider) CreateArchitectureFiles() error {
// var namespace, provider, distPath, repoName, version, gpgFingerprint, gpgPubKeyFile, domain string
log.Println("* Creating architecture files in target directories") log.Println("* Creating architecture files in target directories")
// filename = terraform-provider-[provider]_0.0.1_darwin_amd64.zip - provider_name + version + target + architecture + .zip
// prefix := fmt.Sprintf("v1/providers/%s/%s/%s/", namespace, provider, version)
prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version) prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version)
// pathPrefix := fmt.Sprintf("release/%s", prefix)
pathPrefix := path.Join("release", prefix) pathPrefix := path.Join("release", prefix)
// urlPrefix := fmt.Sprintf("https://%s/%s", domain, prefix)
urlPrefix, err := url.JoinPath("https://", p.Domain, prefix) urlPrefix, err := url.JoinPath("https://", p.Domain, prefix)
if err != nil { if err != nil {
return fmt.Errorf("error creating base url: %w", err) return fmt.Errorf("error creating base url: %w", err)
} }
// download url = https://example.com/v1/providers/namespace/provider/0.0.1/download/terraform-provider_0.0.1_darwin_amd64.zip
downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download") downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download")
if err != nil { if err != nil {
return fmt.Errorf("error crearting download url: %w", err) return fmt.Errorf("error crearting download url: %w", err)
} }
downloadPathPrefix := path.Join(pathPrefix, "download") downloadPathPrefix := path.Join(pathPrefix, "download")
// shasums url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS
shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version)) shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version))
if err != nil { if err != nil {
return fmt.Errorf("error creating shasums url: %w", err) return fmt.Errorf("error creating shasums url: %w", err)
} }
// shasums_signature_url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS.sig
shasumsSigUrl := shasumsUrl + ".sig" shasumsSigUrl := shasumsUrl + ".sig"
gpgAsciiPub, err := p.ReadGpgFile() gpgAsciiPub, err := p.ReadGpgFile()
@ -107,6 +116,33 @@ func (p *Provider) CreateArchitectureFiles() error {
}, },
}, },
} }
// var architectureTemplate = []byte(fmt.Sprintf(`
//{
// "protocols": [
// "4.0",
// "5.1",
// "6.0"
// ],
// "os": "%s",
// "arch": "%s",
// "filename": "%s",
// "download_url": "%s",
// "shasums_url": "%s",
// "shasums_signature_url": "%s",
// "shasum": "%s",
// "signing_keys": {
// "gpg_public_keys": [
// {
// "key_id": "%s",
// "ascii_armor": "%s",
// "trust_signature": "",
// "source": "",
// "source_url": ""
// }
// ]
// }
//}
// `, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub))
log.Printf(" - Arch file: %s", archFileName) log.Printf(" - Arch file: %s", archFileName)
@ -124,12 +160,8 @@ func WriteArchitectureFile(filePath string, arch Architecture) error {
if err != nil { if err != nil {
return fmt.Errorf("error encoding data: %w", err) return fmt.Errorf("error encoding data: %w", err)
} }
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm
err = os.WriteFile( err = os.WriteFile(filePath, jsonString, os.ModePerm)
filePath,
jsonString,
os.ModePerm,
)
if err != nil { if err != nil {
return fmt.Errorf("error writing data: %w", err) return fmt.Errorf("error writing data: %w", err)
} }

View file

@ -161,12 +161,10 @@ func (p *Provider) createVersionsFile() error {
target := fileNameSplit[2] target := fileNameSplit[2]
arch := fileNameSplit[3] arch := fileNameSplit[3]
version.Platforms = append( version.Platforms = append(version.Platforms, Platform{
version.Platforms, Platform{
OS: target, OS: target,
Arch: arch, Arch: arch,
}, })
)
} }
data := Data{} data := Data{}
@ -208,19 +206,16 @@ func (p *Provider) CreateWellKnown() error {
log.Println("* Creating .well-known directory") log.Println("* Creating .well-known directory")
pathString := path.Join(p.RootPath, "release", ".well-known") pathString := path.Join(p.RootPath, "release", ".well-known")
//nolint:gosec // this file is not sensitive, so we can use ModePerm
err := os.MkdirAll(pathString, os.ModePerm) err := os.MkdirAll(pathString, os.ModePerm)
if err != nil && !errors.Is(err, fs.ErrExist) { if err != nil && !errors.Is(err, fs.ErrExist) {
return fmt.Errorf("error creating '%s' dir: %w", pathString, err) return fmt.Errorf("error creating '%s' dir: %w", pathString, err)
} }
log.Println(" - Writing to .well-known/terraform.json file") log.Println(" - Writing to .well-known/terraform.json file")
//nolint:gosec // this file is not sensitive, so we can use 0644
err = os.WriteFile( err = os.WriteFile(
fmt.Sprintf("%s/terraform.json", pathString), fmt.Sprintf("%s/terraform.json", pathString),
[]byte(`{"providers.v1": "/v1/providers/"}`), []byte(`{"providers.v1": "/v1/providers/"}`),
0o644, 0644,
) )
if err != nil { if err != nil {
return err return err
@ -229,10 +224,9 @@ func (p *Provider) CreateWellKnown() error {
return nil return nil
} }
func CreateDir(pathValue string) error { func CreateDir(path string) error {
log.Printf("* Creating %s directory", pathValue) log.Printf("* Creating %s directory", path)
//nolint:gosec // this file is not sensitive, so we can use ModePerm err := os.MkdirAll(path, os.ModePerm)
err := os.MkdirAll(pathValue, os.ModePerm)
if errors.Is(err, fs.ErrExist) { if errors.Is(err, fs.ErrExist) {
return nil return nil
} }
@ -275,23 +269,13 @@ func CopyFile(src, dst string) (int64, error) {
if err != nil { if err != nil {
return 0, err return 0, err
} }
defer func(source *os.File) { defer source.Close()
err := source.Close()
if err != nil {
slog.Error("error closing source file", slog.Any("err", err))
}
}(source)
destination, err := os.Create(dst) destination, err := os.Create(dst)
if err != nil { if err != nil {
return 0, err return 0, err
} }
defer func(destination *os.File) { defer destination.Close()
err := destination.Close()
if err != nil {
slog.Error("error closing destination file", slog.Any("err", err))
}
}(destination)
nBytes, err := io.Copy(destination, source) nBytes, err := io.Copy(destination, source)
return nBytes, err return nBytes, err
} }

View file

@ -35,12 +35,7 @@ func (d *Data) WriteToFile(filePath string) error {
return fmt.Errorf("error encoding data: %w", err) return fmt.Errorf("error encoding data: %w", err)
} }
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm err = os.WriteFile(filePath, jsonString, os.ModePerm)
err = os.WriteFile(
filePath,
jsonString,
os.ModePerm,
)
if err != nil { if err != nil {
return fmt.Errorf("error writing data: %w", err) return fmt.Errorf("error writing data: %w", err)
} }
@ -91,13 +86,7 @@ func (d *Data) LoadFromUrl(uri string) error {
if err != nil { if err != nil {
return err return err
} }
defer func(name string) { defer os.Remove(file.Name()) // Clean up
//nolint:gosec // The file path is generated by os.CreateTemp and is not user-controllable
err := os.Remove(name)
if err != nil {
slog.Error("failed to remove temporary file", slog.Any("err", err))
}
}(file.Name()) // Clean up
err = DownloadFile( err = DownloadFile(
u.String(), u.String(),
@ -134,30 +123,20 @@ func (v *Version) AddProtocol(p string) error {
// DownloadFile will download a url and store it in local filepath. // DownloadFile will download a url and store it in local filepath.
// It writes to the destination file as it downloads it, without // It writes to the destination file as it downloads it, without
// loading the entire file into memory. // loading the entire file into memory.
func DownloadFile(urlValue, filepath string) error { func DownloadFile(url string, filepath string) error {
// Create the file // Create the file
//nolint:gosec // path traversal is not a concern here, as the filepath is generated by us and not user input
out, err := os.Create(filepath) out, err := os.Create(filepath)
if err != nil { if err != nil {
return err return err
} }
defer func(out *os.File) { defer out.Close()
err := out.Close()
if err != nil {
slog.Error("failed to close file", slog.Any("err", err))
}
}(out)
// Get the data // Get the data
resp, err := http.Get(url)
//nolint:gosec,bodyclose // this is a controlled URL, not user input
resp, err := http.Get(urlValue)
if err != nil { if err != nil {
return err return err
} }
defer func(Body io.ReadCloser) { defer resp.Body.Close()
_ = Body.Close()
}(resp.Body)
// Write the body to file // Write the body to file
_, err = io.Copy(out, resp.Body) _, err = io.Copy(out, resp.Body)

View file

@ -11,7 +11,7 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/publish" publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/publish"
) )
var ( var (
@ -29,32 +29,20 @@ var publishCmd = &cobra.Command{
Use: "publish", Use: "publish",
Short: "Publish terraform provider", Short: "Publish terraform provider",
Long: `...`, Long: `...`,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(_ *cobra.Command, args []string) error {
return publish() return publish()
}, },
} }
func init() { //nolint:gochecknoinits //this is the standard way to set up cobra commands func init() { // nolint: gochecknoinits
publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.") publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.")
publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.") publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.")
publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.") publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.")
publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.") publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.")
publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.") publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.")
publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.") publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.")
publishCmd.Flags().StringVarP( publishCmd.Flags().StringVarP(&gpgFingerprint, "gpgFingerprint", "f", "", "GPG Fingerprint for the Terraform registry.")
&gpgFingerprint, publishCmd.Flags().StringVarP(&gpgPubKeyFile, "gpgPubKeyFile", "k", "", "GPG PubKey file name for the Terraform registry.")
"gpgFingerprint",
"f",
"",
"GPG Fingerprint for the Terraform registry.",
)
publishCmd.Flags().StringVarP(
&gpgPubKeyFile,
"gpgPubKeyFile",
"k",
"",
"GPG PubKey file name for the Terraform registry.",
)
err := publishCmd.MarkFlagRequired("namespace") err := publishCmd.MarkFlagRequired("namespace")
if err != nil { if err != nil {
@ -117,7 +105,6 @@ func publish() error {
// Create release dir - only the contents of this need to be uploaded to S3 // Create release dir - only the contents of this need to be uploaded to S3
log.Printf("* Creating release directory") log.Printf("* Creating release directory")
//nolint:gosec // this directory is not sensitive, so we can use 0750
err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm) err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm)
if err != nil && !errors.Is(err, fs.ErrExist) { if err != nil && !errors.Is(err, fs.ErrExist) {
return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err) return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err)

View file

@ -8,7 +8,7 @@ import (
"github.com/SladkyCitron/slogcolor" "github.com/SladkyCitron/slogcolor"
cc "github.com/ivanpirog/coloredcobra" cc "github.com/ivanpirog/coloredcobra"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd"
) )
func main() { func main() {

View file

@ -31,8 +31,8 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
### Read-Only ### Read-Only
- `acl` (List of String) List of IPV4 cidr. - `acl` (List of String) List of IPV4 cidr.
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule. - `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info)) - `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption. - `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption)) ⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
@ -52,18 +52,10 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
Read-Only: Read-Only:
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
<a id="nestedatt--connection_info--write"></a>
### Nested Schema for `connection_info.write`
Read-Only:
- `host` (String) The host of the instance. - `host` (String) The host of the instance.
- `port` (Number) The port of the instance. - `port` (Number) The port of the instance.
<a id="nestedatt--encryption"></a> <a id="nestedatt--encryption"></a>
### Nested Schema for `encryption` ### Nested Schema for `encryption`

View file

@ -0,0 +1,54 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
subcategory: ""
description: |-
---
# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
## Example Usage
```terraform
data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
project_id = var.project_id
region = var.region
cpu = 4
ram = 16
node_type = "Single"
storage_class = "premium-perf2-stackit"
}
```
<!-- schema generated by tfplugindocs -->
## Schema
### Required
- `cpu` (Number) The cpu count of the instance.
- `node_type` (String) defines the nodeType it can be either single or HA
- `project_id` (String) The project ID of the flavor.
- `ram` (Number) The memory of the instance in Gibibyte.
- `region` (String) The region of the flavor.
- `storage_class` (String) The memory of the instance in Gibibyte.
### Read-Only
- `description` (String) The flavor description.
- `flavor_id` (String) The id of the instance flavor.
- `id` (String) The id of the instance flavor.
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes))
<a id="nestedatt--storage_classes"></a>
### Nested Schema for `storage_classes`
Read-Only:
- `class` (String)
- `max_io_per_sec` (Number)
- `max_through_in_mb` (Number)

View file

@ -0,0 +1,54 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
subcategory: ""
description: |-
---
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
## Example Usage
```terraform
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
project_id = var.project_id
region = var.region
cpu = 4
ram = 16
node_type = "Single"
storage_class = "premium-perf2-stackit"
}
```
<!-- schema generated by tfplugindocs -->
## Schema
### Required
- `cpu` (Number) The cpu count of the instance.
- `node_type` (String) defines the nodeType it can be either single or HA
- `project_id` (String) The project ID of the flavor.
- `ram` (Number) The memory of the instance in Gibibyte.
- `region` (String) The region of the flavor.
- `storage_class` (String) The memory of the instance in Gibibyte.
### Read-Only
- `description` (String) The flavor description.
- `flavor_id` (String) The id of the instance flavor.
- `id` (String) The id of the instance flavor.
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes))
<a id="nestedatt--storage_classes"></a>
### Nested Schema for `storage_classes`
Read-Only:
- `class` (String)
- `max_io_per_sec` (Number)
- `max_through_in_mb` (Number)

View file

@ -25,16 +25,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_database.import-example to = stackitprivatepreview_postgresflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}" id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
} }
import {
to = stackitprivatepreview_postgresflexalpha_database.import-example
identity = {
project_id = "project_id"
region = "region"
instance_id = "instance_id"
database_id = "database_id"
}
}
``` ```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->

View file

@ -13,7 +13,7 @@ description: |-
## Example Usage ## Example Usage
```terraform ```terraform
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
name = "example-instance" name = "example-instance"
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
@ -59,7 +59,7 @@ import {
### Required ### Required
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule. - `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
- `flavor_id` (String) The id of the instance flavor. - `flavor_id` (String) The id of the instance flavor.
- `name` (String) The name of the instance. - `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@ -80,7 +80,7 @@ import {
### Read-Only ### Read-Only
- `acl` (List of String) List of IPV4 cidr. - `acl` (List of String) List of IPV4 cidr.
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info)) - `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
- `id` (String) The ID of the instance. - `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not. - `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `status` (String) The current status of the instance. - `status` (String) The current status of the instance.
@ -127,12 +127,5 @@ Required:
Read-Only: Read-Only:
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
<a id="nestedatt--connection_info--write"></a>
### Nested Schema for `connection_info.write`
Read-Only:
- `host` (String) The host of the instance. - `host` (String) The host of the instance.
- `port` (Number) The port of the instance. - `port` (Number) The port of the instance.

View file

@ -25,16 +25,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_user.import-example to = stackitprivatepreview_postgresflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}" id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
} }
import {
to = stackitprivatepreview_postgresflexalpha_user.import-example
identity = {
project_id = "project.id"
region = "region"
instance_id = "instance.id"
user_id = "user.id"
}
}
``` ```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->

View file

@ -10,34 +10,7 @@ description: |-
## Example Usage
```terraform
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
collation = ""
compatibility = "160"
name = ""
owner = ""
}
# Only use the import statement, if you want to import a existing sqlserverflex database
import {
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
import {
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
identity = {
project_id = "project.id"
region = "region"
instance_id = "instance.id"
database_id = "database.id"
}
}
```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->
## Schema ## Schema

View file

@ -10,22 +10,7 @@ description: |-
## Example Usage
```terraform
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
username = "username"
roles = ["role"]
}
# Only use the import statement, if you want to import an existing sqlserverflex user
import {
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->
## Schema ## Schema

View file

@ -10,22 +10,7 @@ description: |-
## Example Usage
```terraform
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
username = "username"
roles = ["role"]
}
# Only use the import statement, if you want to import an existing sqlserverflex user
import {
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->
## Schema ## Schema

View file

@ -1,346 +0,0 @@
package build
import (
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
"log/slog"
"os"
"os/exec"
"path"
"regexp"
"strings"
)
type Builder struct {
rootDir string
SkipClone bool
SkipCleanup bool
PackagesOnly bool
Verbose bool
Debug bool
}
func (b *Builder) Build() error {
slog.Info("Starting Builder")
if b.PackagesOnly {
slog.Info(" >>> only generating pkg_gen <<<")
}
rootErr := b.determineRoot()
if rootErr != nil {
return rootErr
}
if !b.PackagesOnly {
if b.Verbose {
slog.Info(" ... Checking needed commands available")
}
chkErr := checkCommands([]string{})
if chkErr != nil {
return chkErr
}
}
// if !b.SkipCleanup {
// slog.Info("Cleaning up old packages directory")
// err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
// if err != nil {
// return err
// }
//}
//
// if !b.SkipCleanup && !b.PackagesOnly {
// slog.Info("Cleaning up old packages directory")
// err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
// if err != nil {
// return err
// }
//}
// slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
// genDir := path.Join(*root, GEN_REPO_NAME)
// if !b.SkipClone {
// err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
// if err != nil {
// return err
// }
//}
oasHandlerErr := b.oasHandler(path.Join(b.rootDir, "service_specs"))
if oasHandlerErr != nil {
return oasHandlerErr
}
// if !b.PackagesOnly {
// slog.Info("Generating service boilerplate")
// err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
// if err != nil {
// return err
// }
//
// slog.Info("Copying all service files")
// err = CopyDirectory(
// path.Join(*root, "generated", "internal", "services"),
// path.Join(*root, "stackit", "internal", "services"),
// )
// if err != nil {
// return err
// }
//
// err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
// if err != nil {
// return err
// }
//}
// workaround to remove linter complain :D
if b.PackagesOnly && b.Verbose && b.SkipClone && b.SkipCleanup {
bpErr := createBoilerplate(b.rootDir, "boilerplate")
if bpErr != nil {
return bpErr
}
}
slog.Info("Done")
return nil
}
type templateData struct {
PackageName string
PackageNameCamel string
PackageNamePascal string
NameCamel string
NamePascal string
NameSnake string
Fields []string
}
func createBoilerplate(rootFolder, folder string) error {
services, err := os.ReadDir(folder)
if err != nil {
return err
}
for _, svc := range services {
if !svc.IsDir() {
continue
}
resources, err := os.ReadDir(path.Join(folder, svc.Name()))
if err != nil {
return err
}
var handleDS bool
var handleRes bool
var foundDS bool
var foundRes bool
for _, res := range resources {
if !res.IsDir() {
continue
}
resourceName := res.Name()
dsFile := path.Join(
folder,
svc.Name(),
res.Name(),
"datasources_gen",
fmt.Sprintf("%s_data_source_gen.go", res.Name()),
)
handleDS = FileExists(dsFile)
resFile := path.Join(
folder,
svc.Name(),
res.Name(),
"resources_gen",
fmt.Sprintf("%s_resource_gen.go", res.Name()),
)
handleRes = FileExists(resFile)
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
foundDS = FileExists(dsGoFile)
resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
foundRes = FileExists(resGoFile)
if handleDS && !foundDS {
slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
if !ValidateSnakeCase(resourceName) {
return errors.New("resource name is invalid")
}
fields, tokenErr := getTokens(dsFile)
if tokenErr != nil {
return fmt.Errorf("error reading tokens: %w", tokenErr)
}
tplName := "data_source_scaffold.gotmpl"
err = writeTemplateToFile(
tplName,
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
dsGoFile,
&templateData{
PackageName: svc.Name(),
PackageNameCamel: ToCamelCase(svc.Name()),
PackageNamePascal: ToPascalCase(svc.Name()),
NameCamel: ToCamelCase(resourceName),
NamePascal: ToPascalCase(resourceName),
NameSnake: resourceName,
Fields: fields,
},
)
if err != nil {
panic(err)
}
}
if handleRes && !foundRes {
slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
if !ValidateSnakeCase(resourceName) {
return errors.New("resource name is invalid")
}
fields, tokenErr := getTokens(resFile)
if tokenErr != nil {
return fmt.Errorf("error reading tokens: %w", tokenErr)
}
tplName := "resource_scaffold.gotmpl"
err = writeTemplateToFile(
tplName,
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
resGoFile,
&templateData{
PackageName: svc.Name(),
PackageNameCamel: ToCamelCase(svc.Name()),
PackageNamePascal: ToPascalCase(svc.Name()),
NameCamel: ToCamelCase(resourceName),
NamePascal: ToPascalCase(resourceName),
NameSnake: resourceName,
Fields: fields,
},
)
if err != nil {
return err
}
if !FileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
if !ValidateSnakeCase(resourceName) {
return errors.New("resource name is invalid")
}
fncTplName := "functions_scaffold.gotmpl"
err = writeTemplateToFile(
fncTplName,
path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
path.Join(folder, svc.Name(), res.Name(), "functions.go"),
&templateData{
PackageName: svc.Name(),
PackageNameCamel: ToCamelCase(svc.Name()),
PackageNamePascal: ToPascalCase(svc.Name()),
NameCamel: ToCamelCase(resourceName),
NamePascal: ToPascalCase(resourceName),
NameSnake: resourceName,
},
)
if err != nil {
return err
}
}
}
}
}
return nil
}
func handleLine(line string) (string, error) {
schemaRegex := regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
schemaMatches := schemaRegex.FindAllStringSubmatch(line, -1)
if schemaMatches != nil {
return fmt.Sprintf("%stf_original_api_id%s", schemaMatches[0][1], schemaMatches[0][3]), nil
}
modelRegex := regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)
modelMatches := modelRegex.FindAllStringSubmatch(line, -1)
if modelMatches != nil {
return fmt.Sprintf("%stf_original_api_id%s", modelMatches[0][1], modelMatches[0][3]), nil
}
return line, nil
}
func (b *Builder) determineRoot() error {
cmd := exec.Command("git", "rev-parse", "--show-toplevel")
out, err := cmd.Output()
if err != nil {
return err
}
lines := strings.Split(string(out), "\n")
if lines[0] == "" {
return fmt.Errorf("unable to determine root directory from git")
}
b.rootDir = lines[0]
if b.Verbose {
slog.Info(" ... using root", "dir", b.rootDir)
}
return nil
}
// func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
// if !skipClone {
// if FileExists(targetDir) {
// remErr := os.RemoveAll(targetDir)
// if remErr != nil {
// return remErr
// }
// }
// _, cloneErr := git.Clone(
// clone.Repository(repoUrl),
// clone.Directory(targetDir),
// )
// if cloneErr != nil {
// return cloneErr
// }
// }
// return nil
//}
func getTokens(fileName string) ([]string, error) {
fset := token.NewFileSet()
var result []string
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
if err != nil {
return nil, err
}
ast.Inspect(
node, func(n ast.Node) bool {
// Suche nach Typ-Deklarationen (structs)
ts, ok := n.(*ast.TypeSpec)
if ok {
if strings.Contains(ts.Name.Name, "Model") {
ast.Inspect(
ts, func(sn ast.Node) bool {
tts, tok := sn.(*ast.Field)
if tok {
result = append(result, tts.Names[0].String())
}
return true
},
)
}
}
return true
},
)
return result, nil
}

View file

@ -1,120 +0,0 @@
package build
import (
"fmt"
"log/slog"
"os"
"os/exec"
"strings"
"text/template"
)
func FileExists(pathValue string) bool {
_, err := os.Stat(pathValue)
if os.IsNotExist(err) {
return false
}
if err != nil {
panic(err)
}
return true
}
func ucfirst(s string) string {
if s == "" {
return ""
}
return strings.ToUpper(s[:1]) + s[1:]
}
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
fn := template.FuncMap{
"ucfirst": ucfirst,
}
tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
if err != nil {
return err
}
var f *os.File
f, err = os.Create(outFile)
if err != nil {
return err
}
err = tmpl.Execute(f, *data)
if err != nil {
return err
}
err = f.Close()
if err != nil {
return err
}
return nil
}
/* saved for later
func deleteFiles(fNames ...string) error {
for _, fName := range fNames {
if _, err := os.Stat(fName); !os.IsNotExist(err) {
err = os.Remove(fName)
if err != nil {
return err
}
}
}
return nil
}
func copyFile(src, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer func(source *os.File) {
err := source.Close()
if err != nil {
slog.Error("copyFile", "err", err)
}
}(source)
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer func(destination *os.File) {
err := destination.Close()
if err != nil {
slog.Error("copyFile", "err", err)
}
}(destination)
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
*/
func checkCommands(commands []string) error {
for _, commandName := range commands {
if !commandExists(commandName) {
return fmt.Errorf("missing command %s", commandName)
}
slog.Info(" found", "command", commandName)
}
return nil
}
func commandExists(cmd string) bool {
_, err := exec.LookPath(cmd)
return err == nil
}

View file

@ -1,446 +0,0 @@
package build
import (
"bufio"
"bytes"
"errors"
"fmt"
"log"
"log/slog"
"os"
"os/exec"
"path"
"regexp"
"strings"
"gopkg.in/yaml.v3"
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
"github.com/ldez/go-git-cmd-wrapper/v2/git"
)
const (
OasRepoName = "stackit-api-specifications"
OasRepo = "https://github.com/stackitcloud/stackit-api-specifications.git"
ResTypeResource = "resources"
ResTypeDataSource = "datasources"
)
type Data struct {
ServiceName string `yaml:",omitempty" json:",omitempty"`
Versions []Version `yaml:"versions" json:"versions"`
}
type Version struct {
Name string `yaml:"name" json:"name"`
Path string `yaml:"path" json:"path"`
}
var oasTempDir string
func (b *Builder) oasHandler(specDir string) error {
if b.Verbose {
slog.Info("creating schema files", "dir", specDir)
}
if _, err := os.Stat(specDir); os.IsNotExist(err) {
return fmt.Errorf("spec files directory does not exist")
}
err := b.createRepoDir(b.SkipClone)
if err != nil {
return fmt.Errorf("%s", err.Error())
}
err2 := b.handleServices(specDir)
if err2 != nil {
return err2
}
if !b.SkipCleanup {
if b.Verbose {
slog.Info("Finally removing temporary files and directories")
}
err := os.RemoveAll(path.Join(b.rootDir, "generated"))
if err != nil {
slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
return err
}
err = os.RemoveAll(oasTempDir)
if err != nil {
slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
return err
}
}
return nil
}
func (b *Builder) handleServices(specDir string) error {
services, err := os.ReadDir(specDir)
if err != nil {
return err
}
for _, svc := range services {
if !svc.IsDir() {
continue
}
if b.Verbose {
slog.Info(" ... found", "service", svc.Name())
}
var svcVersions Data
svcVersions.ServiceName = svc.Name()
versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
if versionsErr != nil {
return versionsErr
}
oasSpecErr := b.generateServiceFiles(&svcVersions)
if oasSpecErr != nil {
return oasSpecErr
}
}
return nil
}
func (b *Builder) getServiceVersions(confFile string, data *Data) error {
if _, cfgFileErr := os.Stat(confFile); os.IsNotExist(cfgFileErr) {
return fmt.Errorf("config file does not exist")
}
fileContent, fileErr := os.ReadFile(confFile)
if fileErr != nil {
return fileErr
}
convErr := yaml.Unmarshal(fileContent, &data)
if convErr != nil {
return convErr
}
return nil
}
func (b *Builder) createRepoDir(skipClone bool) error {
tmpDirName, err := os.MkdirTemp("", "oasbuild")
if err != nil {
return err
}
oasTempDir = path.Join(tmpDirName, OasRepoName)
slog.Info("Creating oas repo dir", "dir", oasTempDir)
if !skipClone {
if FileExists(oasTempDir) {
slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
return nil
}
out, cloneErr := git.Clone(
clone.Repository(OasRepo),
clone.Directory(oasTempDir),
)
if cloneErr != nil {
slog.Error("git clone error", "output", out)
return cloneErr
}
if b.Verbose {
slog.Info("git clone result", "output", out)
}
}
return nil
}
// generateServiceFiles walks the service_specs tree for every version of the
// given service, matches "<resource>_config.yml" files against the cloned OAS
// repo and runs the OpenAPI + framework generators (resource and data source)
// for each match. Returns the first error encountered.
func (b *Builder) generateServiceFiles(data *Data) error {
	err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
	if err != nil {
		return err
	}
	// Compiled once here instead of once per spec file (it previously lived
	// inside the inner loop). The dot is now escaped so only a literal
	// "_config.yml" suffix matches, not e.g. "_configXyml".
	configRe := regexp.MustCompile(`^(.*)_config\.yml$`)
	for _, v := range data.Versions {
		specDir := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name)
		specFiles, specsErr := os.ReadDir(specDir)
		if specsErr != nil {
			return specsErr
		}
		for _, specFile := range specFiles {
			if specFile.IsDir() {
				continue
			}
			// FindStringSubmatch is enough: the pattern is anchored, so at
			// most one match exists; index 1 holds the resource name.
			matches := configRe.FindStringSubmatch(specFile.Name())
			if matches == nil {
				slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
				continue
			}
			srcSpecFile := path.Join(specDir, specFile.Name())
			if matches[0] != specFile.Name() {
				return fmt.Errorf("matched filename differs from original filename - this should not happen")
			}
			resource := matches[1]
			if b.Verbose {
				slog.Info(
					" found service spec",
					"service",
					data.ServiceName,
					"resource",
					resource,
					"file",
					specFile.Name(),
				)
			}
			oasFile := path.Join(
				oasTempDir,
				"services",
				data.ServiceName,
				v.Path,
				fmt.Sprintf("%s.json", data.ServiceName),
			)
			if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
				slog.Warn(
					" could not find matching oas",
					"svc",
					data.ServiceName,
					"version",
					v.Name,
				)
				continue
			}
			// determine correct target service name (version appended,
			// dashes removed so it is a valid Go package name)
			scName := fmt.Sprintf("%s%s", data.ServiceName, v.Name)
			scName = strings.ReplaceAll(scName, "-", "")
			specJSONFile := path.Join(
				b.rootDir,
				"generated",
				"specs",
				fmt.Sprintf("%s_%s_spec.json", scName, resource),
			)
			cmdErr := b.runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile)
			if cmdErr != nil {
				return cmdErr
			}
			cmdResGenErr := b.runTerraformPluginGenFramework(ResTypeResource, scName, resource, specJSONFile)
			if cmdResGenErr != nil {
				return cmdResGenErr
			}
			cmdDsGenErr := b.runTerraformPluginGenFramework(ResTypeDataSource, scName, resource, specJSONFile)
			if cmdDsGenErr != nil {
				return cmdDsGenErr
			}
		}
	}
	return nil
}
// runTerraformPluginGenFramework invokes the tfplugingen-framework CLI to
// generate either resource or data-source code (selected by resType) from the
// given spec JSON file into the service's *_gen folder. For data sources it
// additionally rewrites the terraform tag in the generated file.
// Returns an error when resType is unknown, the tool cannot be started, or it
// exits non-zero (in which case the tool's stderr becomes the error message).
func (b *Builder) runTerraformPluginGenFramework(resType, svcName, resource, specJSONFile string) error {
	var stdOut, stdErr bytes.Buffer
	tgtFolder := path.Join(
		b.rootDir,
		"stackit",
		"internal",
		"services",
		svcName,
		resource,
		fmt.Sprintf("%s_gen", resType),
	)
	//nolint:gosec // this file is not sensitive, so we can use 0755
	err := os.MkdirAll(tgtFolder, 0o755)
	if err != nil {
		return err
	}
	// Map the internal resource type constant onto the CLI subcommand.
	var subCmd string
	switch resType {
	case ResTypeResource:
		subCmd = "resources"
	case ResTypeDataSource:
		subCmd = "data-sources"
	default:
		return fmt.Errorf("unknown resource type given: %s", resType)
	}
	// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
	cmd := exec.Command(
		"tfplugingen-framework",
		"generate",
		subCmd,
		"--input",
		specJSONFile,
		"--output",
		tgtFolder,
		"--package",
		svcName,
	)
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err = cmd.Start(); err != nil {
		slog.Error(fmt.Sprintf("tfplugingen-framework generate %s", resType), "error", err)
		return err
	}
	if err = cmd.Wait(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			// Tool ran but exited non-zero: surface its stderr as the error.
			slog.Error(
				fmt.Sprintf("tfplugingen-framework generate %s", resType),
				"code",
				exitErr.ExitCode(),
				"error",
				err,
				"stdout",
				stdOut.String(),
				"stderr",
				stdErr.String(),
			)
			return fmt.Errorf("%s", stdErr.String())
		}
		// Wait failed for a non-exit reason (e.g. I/O error). err is known to
		// be non-nil here, so the former redundant nil check is removed.
		slog.Error(
			fmt.Sprintf("tfplugingen-framework generate %s", resType),
			"err",
			err,
			"stdout",
			stdOut.String(),
			"stderr",
			stdErr.String(),
		)
		return err
	}
	if resType == ResTypeDataSource {
		// Post-process the generated data-source file (tf tag rewrite).
		tfAnoErr := b.handleTfTagForDatasourceFile(
			path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
			svcName,
			resource,
		)
		if tfAnoErr != nil {
			return tfAnoErr
		}
	}
	return nil
}
// runTerraformPluginGenOpenAPI invokes the tfplugingen-openapi CLI to convert
// an OAS file plus a generator config (srcSpecFile) into the intermediate spec
// JSON (specJSONFile). Returns an error when the tool cannot be started or
// exits non-zero (in which case the tool's stderr becomes the error message).
func (b *Builder) runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile string) error {
	var stdOut, stdErr bytes.Buffer
	// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
	cmd := exec.Command(
		"tfplugingen-openapi",
		"generate",
		"--config",
		srcSpecFile,
		"--output",
		specJSONFile,
		oasFile,
	)
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err := cmd.Start(); err != nil {
		slog.Error(
			"tfplugingen-openapi generate",
			"error",
			err,
			"stdOut",
			stdOut.String(),
			"stdErr",
			stdErr.String(),
		)
		return err
	}
	if err := cmd.Wait(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			// Tool ran but exited non-zero: surface its stderr as the error.
			slog.Error(
				"tfplugingen-openapi generate",
				"code",
				exitErr.ExitCode(),
				"error",
				err,
				"stdout",
				stdOut.String(),
				"stderr",
				stdErr.String(),
			)
			return fmt.Errorf("%s", stdErr.String())
		}
		// Wait failed for a non-exit reason (e.g. I/O error). err is known to
		// be non-nil here, so the former redundant nil check is removed.
		slog.Error(
			"tfplugingen-openapi generate",
			"err",
			err,
			"stdout",
			stdOut.String(),
			"stderr",
			stdErr.String(),
		)
		return err
	}
	// The tool is normally silent; unexpected output is worth a warning.
	if stdOut.Len() > 0 {
		slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
	}
	return nil
}
// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id"
// by rewriting the generated data-source file line by line (via handleLine)
// into a temp file and atomically renaming it over the original.
// Fixes over the previous version: the source and temp file handles are now
// closed on every error path (they leaked on mid-loop returns), the temp file
// is removed when the rewrite fails, and a rename failure is returned to the
// caller instead of terminating the process via log.Fatal.
func (b *Builder) handleTfTagForDatasourceFile(filePath, service, resource string) error {
	if b.Verbose {
		slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
	}
	if !FileExists(filePath) {
		slog.Warn(" could not find file, skipping", "path", filePath)
		return nil
	}
	f, err := os.Open(filePath)
	if err != nil {
		return err
	}
	defer f.Close() //nolint:errcheck // read-only handle; close error is irrelevant
	tmp, err := os.CreateTemp(b.rootDir, "replace-*")
	if err != nil {
		return err
	}
	// Clean up the temp file on failure; after a successful rename the name no
	// longer exists and Remove is a harmless no-op error.
	defer os.Remove(tmp.Name()) //nolint:errcheck
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		resLine, lineErr := handleLine(sc.Text())
		if lineErr != nil {
			tmp.Close() //nolint:errcheck // already failing; surface lineErr
			return lineErr
		}
		if _, wErr := tmp.WriteString(resLine + "\n"); wErr != nil {
			tmp.Close() //nolint:errcheck // already failing; surface wErr
			return wErr
		}
	}
	if scErr := sc.Err(); scErr != nil {
		tmp.Close() //nolint:errcheck // already failing; surface scErr
		return scErr
	}
	// Close before renaming so all writes are flushed (required on Windows).
	if err := tmp.Close(); err != nil {
		return err
	}
	if err := os.Rename(tmp.Name(), filePath); err != nil {
		return err
	}
	return nil
}

238
go.mod
View file

@ -4,285 +4,83 @@ go 1.25.6
require ( require (
github.com/SladkyCitron/slogcolor v1.8.0 github.com/SladkyCitron/slogcolor v1.8.0
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/go-cmp v0.7.0 github.com/google/go-cmp v0.7.0
github.com/google/uuid v1.6.0 github.com/google/uuid v1.6.0
github.com/hashicorp/terraform-plugin-framework v1.18.0 github.com/hashicorp/terraform-plugin-framework v1.17.0
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
github.com/hashicorp/terraform-plugin-go v0.30.0 github.com/hashicorp/terraform-plugin-go v0.29.0
github.com/hashicorp/terraform-plugin-log v0.10.0 github.com/hashicorp/terraform-plugin-log v0.10.0
github.com/hashicorp/terraform-plugin-testing v1.14.0 github.com/hashicorp/terraform-plugin-testing v1.14.0
github.com/iancoleman/strcase v0.3.0 github.com/iancoleman/strcase v0.3.0
github.com/ivanpirog/coloredcobra v1.0.1 github.com/ivanpirog/coloredcobra v1.0.1
github.com/jarcoal/httpmock v1.4.1
github.com/joho/godotenv v1.5.1 github.com/joho/godotenv v1.5.1
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
github.com/spf13/cobra v1.10.2 github.com/spf13/cobra v1.10.2
github.com/stackitcloud/stackit-sdk-go/core v0.22.0 github.com/stackitcloud/stackit-sdk-go/core v0.21.1
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0 github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0
github.com/teambition/rrule-go v1.8.2 github.com/teambition/rrule-go v1.8.2
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
) )
require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect require (
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 // indirect
)
require ( require (
4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
4d63.com/gochecknoglobals v0.2.2 // indirect
codeberg.org/chavacava/garif v0.2.0 // indirect
codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
dario.cat/mergo v1.0.1 // indirect dario.cat/mergo v1.0.1 // indirect
dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect
dev.gaijin.team/go/golib v0.6.0 // indirect
github.com/4meepo/tagalign v1.4.3 // indirect
github.com/Abirdcfly/dupword v0.1.7 // indirect
github.com/AdminBenni/iota-mixing v1.0.0 // indirect
github.com/AlwxSin/noinlineerr v1.0.5 // indirect
github.com/Antonboom/errname v1.1.1 // indirect
github.com/Antonboom/nilnil v1.1.1 // indirect
github.com/Antonboom/testifylint v1.6.4 // indirect
github.com/BurntSushi/toml v1.6.0 // indirect
github.com/Djarvur/go-err113 v0.1.1 // indirect
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.4.0 // indirect
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/MirrexOne/unqueryvet v1.5.4 // indirect
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
github.com/ProtonMail/go-crypto v1.4.0 // indirect
github.com/agext/levenshtein v1.2.3 // indirect github.com/agext/levenshtein v1.2.3 // indirect
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
github.com/alexkohler/prealloc v1.1.0 // indirect
github.com/alfatraining/structtag v1.0.0 // indirect
github.com/alingse/asasalint v0.0.11 // indirect
github.com/alingse/nilnesserr v0.2.0 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/armon/go-radix v1.0.0 // indirect
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bgentry/speakeasy v0.1.0 // indirect
github.com/bkielbasa/cyclop v1.2.3 // indirect
github.com/blizzy78/varnamelen v0.8.0 // indirect
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
github.com/bombsimon/wsl/v4 v4.7.0 // indirect
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
github.com/breml/bidichk v0.3.3 // indirect
github.com/breml/errchkjson v0.4.1 // indirect
github.com/butuzov/ireturn v0.4.0 // indirect
github.com/butuzov/mirror v1.3.0 // indirect
github.com/catenacyber/perfsprint v0.10.1 // indirect
github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charithe/durationcheck v0.0.11 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/lipgloss v1.1.0 // indirect
github.com/charmbracelet/x/ansi v0.10.1 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/ckaznocha/intrange v0.3.1 // indirect
github.com/cloudflare/circl v1.6.3 // indirect github.com/cloudflare/circl v1.6.3 // indirect
github.com/curioswitch/go-reassign v0.3.0 // indirect
github.com/daixiang0/gci v0.13.7 // indirect
github.com/dave/dst v0.27.3 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/denis-tingaikin/go-header v0.5.0 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/ettle/strcase v0.2.0 // indirect
github.com/fatih/color v1.18.0 // indirect github.com/fatih/color v1.18.0 // indirect
github.com/fatih/structtag v1.2.0 // indirect github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
github.com/firefart/nonamedreturns v1.0.6 // indirect
github.com/fsnotify/fsnotify v1.5.4 // indirect
github.com/fzipp/gocyclo v0.6.0 // indirect
github.com/ghostiam/protogetter v0.3.20 // indirect
github.com/go-critic/go-critic v0.14.3 // indirect
github.com/go-toolsmith/astcast v1.1.0 // indirect
github.com/go-toolsmith/astcopy v1.1.0 // indirect
github.com/go-toolsmith/astequal v1.2.0 // indirect
github.com/go-toolsmith/astfmt v1.1.0 // indirect
github.com/go-toolsmith/astp v1.1.0 // indirect
github.com/go-toolsmith/strparse v1.1.0 // indirect
github.com/go-toolsmith/typep v1.1.0 // indirect
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/godoc-lint/godoc-lint v0.11.2 // indirect
github.com/gofrs/flock v0.13.0 // indirect
github.com/golang/protobuf v1.5.4 // indirect github.com/golang/protobuf v1.5.4 // indirect
github.com/golangci/asciicheck v0.5.0 // indirect
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
github.com/golangci/go-printf-func-name v0.1.1 // indirect
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
github.com/golangci/golangci-lint/v2 v2.11.2 // indirect
github.com/golangci/golines v0.15.0 // indirect
github.com/golangci/misspell v0.8.0 // indirect
github.com/golangci/plugin-module-register v0.1.2 // indirect
github.com/golangci/revgrep v0.8.0 // indirect
github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
github.com/gordonklaus/ineffassign v0.2.0 // indirect
github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
github.com/gostaticanalysis/comment v1.5.0 // indirect
github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
github.com/gostaticanalysis/nilerr v0.1.2 // indirect
github.com/hashicorp/cli v1.1.7 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-cty v1.5.0 // indirect github.com/hashicorp/go-cty v1.5.0 // indirect
github.com/hashicorp/go-hclog v1.6.3 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect
github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-plugin v1.7.0 // indirect github.com/hashicorp/go-plugin v1.7.0 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.8.0 // indirect github.com/hashicorp/go-version v1.8.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hc-install v0.9.3 // indirect github.com/hashicorp/hc-install v0.9.3 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hashicorp/hcl/v2 v2.24.0 // indirect github.com/hashicorp/hcl/v2 v2.24.0 // indirect
github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/logutils v1.0.0 // indirect
github.com/hashicorp/terraform-exec v0.25.0 // indirect github.com/hashicorp/terraform-exec v0.25.0 // indirect
github.com/hashicorp/terraform-json v0.27.2 // indirect github.com/hashicorp/terraform-json v0.27.2 // indirect
github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 // indirect
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
github.com/hashicorp/terraform-svchost v0.2.1 // indirect github.com/hashicorp/terraform-svchost v0.2.0 // indirect
github.com/hashicorp/yamux v0.1.2 // indirect github.com/hashicorp/yamux v0.1.2 // indirect
github.com/hexops/gotextdiff v1.0.3 // indirect
github.com/huandu/xstrings v1.3.3 // indirect
github.com/imdario/mergo v0.3.15 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jgautheron/goconst v1.8.2 // indirect
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
github.com/jjti/go-spancheck v0.6.5 // indirect
github.com/julz/importas v0.2.0 // indirect
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
github.com/kisielk/errcheck v1.10.0 // indirect
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
github.com/kr/text v0.2.0 // indirect github.com/kr/text v0.2.0 // indirect
github.com/kulti/thelper v0.7.1 // indirect
github.com/kunwardeep/paralleltest v1.0.15 // indirect
github.com/lasiar/canonicalheader v1.1.2 // indirect
github.com/ldez/exptostd v0.4.5 // indirect
github.com/ldez/gomoddirectives v0.8.0 // indirect
github.com/ldez/grignotin v0.10.1 // indirect
github.com/ldez/structtags v0.6.1 // indirect
github.com/ldez/tagliatelle v0.7.2 // indirect
github.com/ldez/usetesting v0.5.0 // indirect
github.com/leonklingele/grouper v1.1.2 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/macabu/inamedparam v0.2.0 // indirect
github.com/magiconair/properties v1.8.6 // indirect
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
github.com/manuelarte/funcorder v0.5.0 // indirect
github.com/maratori/testableexamples v1.0.1 // indirect
github.com/maratori/testpackage v1.1.2 // indirect
github.com/matoous/godox v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/mgechev/revive v1.15.0 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/moricho/tparallel v0.3.2 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/nakabonne/nestif v0.3.1 // indirect
github.com/nishanths/exhaustive v0.12.0 // indirect
github.com/nishanths/predeclared v0.2.2 // indirect
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
github.com/oklog/run v1.2.0 // indirect github.com/oklog/run v1.2.0 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/posener/complete v1.2.3 // indirect
github.com/prometheus/client_golang v1.12.1 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/quasilyte/go-ruleguard v0.4.5 // indirect
github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
github.com/quasilyte/gogrep v0.5.0 // indirect
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
github.com/raeperd/recvcheck v0.2.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/ryancurrah/gomodguard v1.4.1 // indirect
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
github.com/securego/gosec/v2 v2.24.7 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/sirupsen/logrus v1.9.4 // indirect
github.com/sivchari/containedctx v1.0.3 // indirect
github.com/sonatard/noctx v0.5.0 // indirect
github.com/sourcegraph/go-diff v0.7.0 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect github.com/spf13/pflag v1.0.10 // indirect
github.com/spf13/viper v1.12.0 // indirect
github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.11.1 // indirect github.com/stretchr/testify v1.11.1 // indirect
github.com/subosito/gotenv v1.4.1 // indirect
github.com/tetafro/godot v1.5.4 // indirect
github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
github.com/timonwong/loggercheck v0.11.0 // indirect
github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
github.com/ultraware/funlen v0.2.0 // indirect
github.com/ultraware/whitespace v0.2.0 // indirect
github.com/uudashr/gocognit v1.2.1 // indirect
github.com/uudashr/iface v1.4.1 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
github.com/xen0n/gosmopolitan v1.3.0 // indirect github.com/zclconf/go-cty v1.17.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yagipy/maintidx v1.0.0 // indirect
github.com/yeya24/promlinter v0.3.0 // indirect
github.com/ykadowak/zerologlint v0.1.5 // indirect
github.com/yuin/goldmark v1.7.7 // indirect
github.com/yuin/goldmark-meta v1.1.0 // indirect
github.com/zclconf/go-cty v1.18.0 // indirect
gitlab.com/bosi/decorder v0.4.2 // indirect
go-simpler.org/musttag v0.14.0 // indirect
go-simpler.org/sloglint v0.11.1 // indirect
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
go.augendre.info/arangolint v0.4.0 // indirect
go.augendre.info/fatcontext v0.9.0 // indirect
go.uber.org/multierr v1.10.0 // indirect
go.uber.org/zap v1.27.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/crypto v0.48.0 // indirect golang.org/x/crypto v0.48.0 // indirect
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
golang.org/x/mod v0.33.0 // indirect golang.org/x/mod v0.33.0 // indirect
golang.org/x/net v0.51.0 // indirect golang.org/x/net v0.50.0 // indirect
golang.org/x/sync v0.19.0 // indirect golang.org/x/sync v0.19.0 // indirect
golang.org/x/sys v0.41.0 // indirect golang.org/x/sys v0.41.0 // indirect
golang.org/x/text v0.34.0 // indirect golang.org/x/text v0.34.0 // indirect
golang.org/x/tools v0.42.0 // indirect golang.org/x/tools v0.42.0 // indirect
google.golang.org/appengine v1.6.8 // indirect google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
google.golang.org/grpc v1.79.2 // indirect google.golang.org/grpc v1.79.1 // indirect
google.golang.org/protobuf v1.36.11 // indirect google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
honnef.co/go/tools v0.7.0 // indirect
mvdan.cc/gofumpt v0.9.2 // indirect
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
) )
tool golang.org/x/tools/cmd/goimports

1009
go.sum

File diff suppressed because it is too large Load diff

View file

@ -2,13 +2,6 @@
version: "2" version: "2"
run: run:
concurrency: 4 concurrency: 4
output:
formats:
text:
print-linter-name: true
print-issued-lines: true
colors: true
path: stdout
linters: linters:
enable: enable:
- bodyclose - bodyclose
@ -75,10 +68,6 @@ linters:
- name: empty-lines - name: empty-lines
- name: early-return - name: early-return
exclusions: exclusions:
paths:
- stackit-sdk-generator/
- generated/
- pkg_gen/
generated: lax generated: lax
warn-unused: true warn-unused: true
# Excluding configuration per-path, per-linter, per-text and per-source. # Excluding configuration per-path, per-linter, per-text and per-source.
@ -87,6 +76,14 @@ linters:
- path: _test\.go - path: _test\.go
linters: linters:
- gochecknoinits - gochecknoinits
paths:
- third_party/
- builtin/
- examples/
- tools/copy.go
- tools/main.go
- pkg_gen/
- cmd/
formatters: formatters:
enable: enable:
- gofmt - gofmt
@ -95,3 +92,11 @@ formatters:
goimports: goimports:
local-prefixes: local-prefixes:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
exclusions:
generated: lax
paths:
- third_party/
- builtin/
- examples/
- pkg_gen/
- cmd/

View file

@ -0,0 +1,11 @@
package testutil
import "testing"
func Equal[V comparable](t *testing.T, got, expected V) {
t.Helper()
if expected != got {
t.Errorf("assert equal failed:\ngot: %v \nexpected: %v", got, expected)
}
}

View file

@ -0,0 +1,651 @@
package testutil
import (
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/hashicorp/terraform-plugin-framework/providerserver"
"github.com/hashicorp/terraform-plugin-go/tfprotov6"
"github.com/hashicorp/terraform-plugin-testing/config"
"github.com/hashicorp/terraform-plugin-testing/echoprovider"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
)
const (
	// Default location of credentials JSON
	// credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive

	// serviceAccountFilePath is the default location of the service account
	// JSON used by acceptance tests — presumably resolved relative to the
	// user's home directory; TODO confirm against the code that reads it.
	serviceAccountFilePath = ".stackit/service_account.json"
)
var (
	// TestAccProtoV6ProviderFactories is used to instantiate a provider during
	// acceptance testing. The factory function will be invoked for every Terraform
	// CLI command executed to create a provider server to which the CLI can
	// reattach.
	TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
		"stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
	}
	// TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during
	// acceptance testing. The factory function will be invoked for every Terraform
	// CLI command executed to create a provider server to which the CLI can
	// reattach.
	//
	// See the Terraform acceptance test documentation on ephemeral resources for more information:
	// https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources
	TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
		"stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
		"echo":                  echoprovider.NewProviderServer(),
	}
	// E2ETestsEnabled checks if end-to-end tests should be run.
	// It is enabled when the TF_ACC environment variable is set to "1".
	E2ETestsEnabled = os.Getenv("TF_ACC") == "1"
	// OrganizationId is the id of organization used for tests
	OrganizationId = os.Getenv("TF_ACC_ORGANIZATION_ID")
	// ProjectId is the id of project used for tests
	ProjectId = os.Getenv("TF_ACC_PROJECT_ID")
	// Region is the STACKIT region used for tests
	Region = os.Getenv("TF_ACC_REGION")
	// ServiceAccountFile is the json file of the service account
	ServiceAccountFile = os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")
	// ServerId is the id of a server used for some tests
	ServerId = getenv("TF_ACC_SERVER_ID", "")
	// TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests
	TestProjectParentContainerID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID")
	// TestProjectParentUUID is the uuid of the parent resource under which projects are created as part of the resource-manager acceptance tests
	TestProjectParentUUID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_UUID")
	// TestProjectServiceAccountEmail is the e-mail of a service account with admin permissions on the organization under which projects are created as part of the resource-manager acceptance tests
	TestProjectServiceAccountEmail = os.Getenv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL")
	// TestProjectUserEmail is the e-mail of a user for the project created as part of the resource-manager acceptance tests
	// Default email: acc-test@sa.stackit.cloud
	TestProjectUserEmail = getenv("TF_ACC_TEST_PROJECT_USER_EMAIL", "acc-test@sa.stackit.cloud")
	// TestImageLocalFilePath is the local path to an image file used for image acceptance tests
	TestImageLocalFilePath = getenv("TF_ACC_TEST_IMAGE_LOCAL_FILE_PATH", "default")

	// Per-service custom endpoint overrides, read from the corresponding
	// TF_ACC_*_CUSTOM_ENDPOINT variable; empty string when unset, which makes
	// the provider-config helpers below fall back to the default endpoints.
	CdnCustomEndpoint          = os.Getenv("TF_ACC_CDN_CUSTOM_ENDPOINT")
	DnsCustomEndpoint          = os.Getenv("TF_ACC_DNS_CUSTOM_ENDPOINT")
	GitCustomEndpoint          = os.Getenv("TF_ACC_GIT_CUSTOM_ENDPOINT")
	IaaSCustomEndpoint         = os.Getenv("TF_ACC_IAAS_CUSTOM_ENDPOINT")
	KMSCustomEndpoint          = os.Getenv("TF_ACC_KMS_CUSTOM_ENDPOINT")
	LoadBalancerCustomEndpoint = os.Getenv("TF_ACC_LOADBALANCER_CUSTOM_ENDPOINT")
	LogMeCustomEndpoint        = os.Getenv("TF_ACC_LOGME_CUSTOM_ENDPOINT")
	MariaDBCustomEndpoint      = os.Getenv("TF_ACC_MARIADB_CUSTOM_ENDPOINT")
	ModelServingCustomEndpoint = os.Getenv("TF_ACC_MODELSERVING_CUSTOM_ENDPOINT")
	// NOTE(review): this env var name is lower-case, unlike every other
	// TF_ACC_* key in this block — confirm whether
	// TF_ACC_AUTHORIZATION_CUSTOM_ENDPOINT was intended.
	AuthorizationCustomEndpoint   = os.Getenv("TF_ACC_authorization_custom_endpoint")
	MongoDBFlexCustomEndpoint     = os.Getenv("TF_ACC_MONGODBFLEX_CUSTOM_ENDPOINT")
	OpenSearchCustomEndpoint      = os.Getenv("TF_ACC_OPENSEARCH_CUSTOM_ENDPOINT")
	ObservabilityCustomEndpoint   = os.Getenv("TF_ACC_OBSERVABILITY_CUSTOM_ENDPOINT")
	ObjectStorageCustomEndpoint   = os.Getenv("TF_ACC_OBJECTSTORAGE_CUSTOM_ENDPOINT")
	PostgresFlexCustomEndpoint    = os.Getenv("TF_ACC_POSTGRESFLEX_CUSTOM_ENDPOINT")
	RabbitMQCustomEndpoint        = os.Getenv("TF_ACC_RABBITMQ_CUSTOM_ENDPOINT")
	RedisCustomEndpoint           = os.Getenv("TF_ACC_REDIS_CUSTOM_ENDPOINT")
	ResourceManagerCustomEndpoint = os.Getenv("TF_ACC_RESOURCEMANAGER_CUSTOM_ENDPOINT")
	ScfCustomEndpoint             = os.Getenv("TF_ACC_SCF_CUSTOM_ENDPOINT")
	SecretsManagerCustomEndpoint  = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT")
	SQLServerFlexCustomEndpoint   = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT")
	ServerBackupCustomEndpoint    = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT")
	ServerUpdateCustomEndpoint    = os.Getenv("TF_ACC_SERVER_UPDATE_CUSTOM_ENDPOINT")
	ServiceAccountCustomEndpoint  = os.Getenv("TF_ACC_SERVICE_ACCOUNT_CUSTOM_ENDPOINT")
	SKECustomEndpoint             = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT")
)
// Provider config helper functions
// ObservabilityProviderConfig returns an HCL provider block for acceptance
// tests: the custom Observability endpoint when ObservabilityCustomEndpoint is
// set, otherwise the default eu01 region.
func ObservabilityProviderConfig() string {
	if ObservabilityCustomEndpoint == "" {
		return `provider "stackitprivatepreview" {
	default_region = "eu01"
}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		observability_custom_endpoint = "%s"
	}`,
		ObservabilityCustomEndpoint,
	)
}
// CdnProviderConfig returns an HCL provider block for acceptance tests with
// beta resources enabled; it targets the custom CDN endpoint when
// CdnCustomEndpoint is set.
func CdnProviderConfig() string {
	if CdnCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		enable_beta_resources = true
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		cdn_custom_endpoint   = "%s"
		enable_beta_resources = true
	}`,
		CdnCustomEndpoint,
	)
}
// DnsProviderConfig returns an HCL provider block for acceptance tests; it
// targets the custom DNS endpoint when DnsCustomEndpoint is set, otherwise an
// empty default provider block.
func DnsProviderConfig() string {
	if DnsCustomEndpoint == "" {
		return `provider "stackitprivatepreview" {}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		dns_custom_endpoint = "%s"
	}`,
		DnsCustomEndpoint,
	)
}
// IaaSProviderConfig returns an HCL provider block for acceptance tests: the
// custom IaaS endpoint when IaaSCustomEndpoint is set, otherwise the default
// eu01 region.
func IaaSProviderConfig() string {
	if IaaSCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		default_region = "eu01"
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		iaas_custom_endpoint = "%s"
	}`,
		IaaSCustomEndpoint,
	)
}
// IaaSProviderConfigWithBetaResourcesEnabled is like IaaSProviderConfig but
// additionally sets enable_beta_resources = true in the provider block.
func IaaSProviderConfigWithBetaResourcesEnabled() string {
	if IaaSCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		enable_beta_resources = true
		default_region = "eu01"
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		enable_beta_resources = true
		iaas_custom_endpoint = "%s"
	}`,
		IaaSCustomEndpoint,
	)
}
// IaaSProviderConfigWithExperiments is like IaaSProviderConfig but opts the
// provider into the "routing-tables" and "network" experiments.
func IaaSProviderConfigWithExperiments() string {
	if IaaSCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		default_region = "eu01"
		experiments = [ "routing-tables", "network" ]
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		iaas_custom_endpoint = "%s"
		experiments = [ "routing-tables", "network" ]
	}`,
		IaaSCustomEndpoint,
	)
}
// KMSProviderConfig returns an HCL provider block for acceptance tests: the
// custom KMS endpoint when KMSCustomEndpoint is set, otherwise the default
// eu01 region.
func KMSProviderConfig() string {
	if KMSCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		default_region = "eu01"
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		kms_custom_endpoint = "%s"
	}`,
		KMSCustomEndpoint,
	)
}
// LoadBalancerProviderConfig returns an HCL provider block for acceptance
// tests: the custom Load Balancer endpoint when LoadBalancerCustomEndpoint is
// set, otherwise the default eu01 region.
func LoadBalancerProviderConfig() string {
	if LoadBalancerCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		default_region = "eu01"
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		loadbalancer_custom_endpoint = "%s"
	}`,
		LoadBalancerCustomEndpoint,
	)
}
// LogMeProviderConfig returns an HCL provider block for acceptance tests: the
// custom LogMe endpoint when LogMeCustomEndpoint is set, otherwise the default
// eu01 region.
func LogMeProviderConfig() string {
	if LogMeCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		default_region = "eu01"
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		logme_custom_endpoint = "%s"
	}`,
		LogMeCustomEndpoint,
	)
}
// MariaDBProviderConfig returns an HCL provider block for acceptance tests:
// the custom MariaDB endpoint when MariaDBCustomEndpoint is set, otherwise the
// default eu01 region.
func MariaDBProviderConfig() string {
	if MariaDBCustomEndpoint == "" {
		return `
	provider "stackitprivatepreview" {
		default_region = "eu01"
	}`
	}
	return fmt.Sprintf(`
	provider "stackitprivatepreview" {
		mariadb_custom_endpoint = "%s"
	}`,
		MariaDBCustomEndpoint,
	)
}
// ModelServingProviderConfig renders the provider block for model serving
// acceptance tests: a non-empty ModelServingCustomEndpoint is injected,
// otherwise region "eu01" is used.
//
// Fix: the default-region branch previously ended with a trailing newline
// after the closing brace, unlike every sibling *ProviderConfig helper;
// normalized to keep the generated HCL snippets consistent.
func ModelServingProviderConfig() string {
	if ModelServingCustomEndpoint == "" {
		return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
	}
	return fmt.Sprintf(`
provider "stackitprivatepreview" {
modelserving_custom_endpoint = "%s"
}`,
		ModelServingCustomEndpoint,
	)
}
// MongoDBFlexProviderConfig renders the provider block for MongoDB Flex
// acceptance tests: a non-empty MongoDBFlexCustomEndpoint is injected,
// otherwise region "eu01" is used.
func MongoDBFlexProviderConfig() string {
	if MongoDBFlexCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
mongodbflex_custom_endpoint = "%s"
}`, MongoDBFlexCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// ObjectStorageProviderConfig renders the provider block for object storage
// acceptance tests: a non-empty ObjectStorageCustomEndpoint is injected,
// otherwise region "eu01" is used.
func ObjectStorageProviderConfig() string {
	if ObjectStorageCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
objectstorage_custom_endpoint = "%s"
}`, ObjectStorageCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// OpenSearchProviderConfig renders the provider block for OpenSearch
// acceptance tests: a non-empty OpenSearchCustomEndpoint is injected,
// otherwise region "eu01" is used.
func OpenSearchProviderConfig() string {
	if OpenSearchCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
opensearch_custom_endpoint = "%s"
}`, OpenSearchCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// PostgresFlexProviderConfig renders the provider block for Postgres Flex
// acceptance tests. saFile is the path to the service account key file used
// for authentication; a non-empty PostgresFlexCustomEndpoint is injected as
// well, otherwise region "eu01" is used.
func PostgresFlexProviderConfig(saFile string) string {
	if PostgresFlexCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
service_account_key_path = "%s"
postgresflex_custom_endpoint = "%s"
}`, saFile, PostgresFlexCustomEndpoint)
	}
	return fmt.Sprintf(`
provider "stackitprivatepreview" {
default_region = "eu01"
service_account_key_path = "%s"
}`, saFile)
}
// RabbitMQProviderConfig renders the provider block for RabbitMQ acceptance
// tests: a non-empty RabbitMQCustomEndpoint is injected, otherwise region
// "eu01" is used.
func RabbitMQProviderConfig() string {
	if RabbitMQCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
rabbitmq_custom_endpoint = "%s"
}`, RabbitMQCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// RedisProviderConfig renders the provider block for Redis acceptance tests:
// a non-empty RedisCustomEndpoint is injected, otherwise region "eu01" is used.
func RedisProviderConfig() string {
	if RedisCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
redis_custom_endpoint = "%s"
}`, RedisCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// ResourceManagerProviderConfig renders the provider block used by resource
// manager acceptance tests. The service account key JSON is resolved via
// GetTestProjectServiceAccountJson (env var first, file fallback) and
// interpolated directly into the HCL.
//
// NOTE(review): the key is typically a multi-line JSON document; embedding it
// inside a double-quoted HCL string via %s looks like it would produce invalid
// HCL — confirm how this config is consumed.
// NOTE(review): the custom-endpoint branch assigns the same JSON value to
// service_account_token while the default branch uses service_account_key —
// verify this token/key asymmetry is intentional.
func ResourceManagerProviderConfig() string {
	key := GetTestProjectServiceAccountJson("")
	// Both custom endpoints must be set together; otherwise use defaults.
	if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
service_account_key = "%s"
}`,
			key,
		)
	}
	return fmt.Sprintf(`
provider "stackitprivatepreview" {
resourcemanager_custom_endpoint = "%s"
authorization_custom_endpoint = "%s"
service_account_token = "%s"
}`,
		ResourceManagerCustomEndpoint,
		AuthorizationCustomEndpoint,
		key,
	)
}
// SecretsManagerProviderConfig renders the provider block for secrets manager
// acceptance tests: a non-empty SecretsManagerCustomEndpoint is injected,
// otherwise region "eu01" is used.
func SecretsManagerProviderConfig() string {
	if SecretsManagerCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
secretsmanager_custom_endpoint = "%s"
}`, SecretsManagerCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// SQLServerFlexProviderConfig renders the provider block for SQLServer Flex
// acceptance tests. saFile is the path to the service account key file used
// for authentication; a non-empty SQLServerFlexCustomEndpoint is injected as
// well, otherwise region "eu01" is used.
func SQLServerFlexProviderConfig(saFile string) string {
	if SQLServerFlexCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
service_account_key_path = "%s"
sqlserverflex_custom_endpoint = "%s"
}`, saFile, SQLServerFlexCustomEndpoint)
	}
	return fmt.Sprintf(`
provider "stackitprivatepreview" {
default_region = "eu01"
service_account_key_path = "%s"
}`, saFile)
}
// ServerBackupProviderConfig renders the provider block for server backup
// acceptance tests with beta resources enabled: a non-empty
// ServerBackupCustomEndpoint is injected, otherwise region "eu01" is used.
func ServerBackupProviderConfig() string {
	if ServerBackupCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
server_backup_custom_endpoint = "%s"
enable_beta_resources = true
}`, ServerBackupCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
enable_beta_resources = true
}`
}
// ServerUpdateProviderConfig renders the provider block for server update
// acceptance tests with beta resources enabled: a non-empty
// ServerUpdateCustomEndpoint is injected, otherwise region "eu01" is used.
func ServerUpdateProviderConfig() string {
	if ServerUpdateCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
server_update_custom_endpoint = "%s"
enable_beta_resources = true
}`, ServerUpdateCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
enable_beta_resources = true
}`
}
// SKEProviderConfig renders the provider block for SKE acceptance tests:
// a non-empty SKECustomEndpoint is injected, otherwise region "eu01" is used.
func SKEProviderConfig() string {
	if SKECustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
ske_custom_endpoint = "%s"
}`, SKECustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// AuthorizationProviderConfig renders the provider block for authorization
// acceptance tests with the "iam" experiment enabled: a non-empty
// AuthorizationCustomEndpoint is injected, otherwise region "eu01" is used.
func AuthorizationProviderConfig() string {
	if AuthorizationCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
authorization_custom_endpoint = "%s"
experiments = ["iam"]
}`, AuthorizationCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
experiments = ["iam"]
}`
}
// ServiceAccountProviderConfig renders the provider block for service account
// acceptance tests with beta resources enabled: a non-empty
// ServiceAccountCustomEndpoint is injected, otherwise region "eu01" is used.
func ServiceAccountProviderConfig() string {
	if ServiceAccountCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
service_account_custom_endpoint = "%s"
enable_beta_resources = true
}`, ServiceAccountCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
enable_beta_resources = true
}`
}
// GitProviderConfig renders the provider block for Git acceptance tests with
// beta resources enabled: a non-empty GitCustomEndpoint is injected,
// otherwise region "eu01" is used.
func GitProviderConfig() string {
	if GitCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
git_custom_endpoint = "%s"
enable_beta_resources = true
}`, GitCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
enable_beta_resources = true
}`
}
// ScfProviderConfig renders the provider block for SCF acceptance tests:
// a non-empty ScfCustomEndpoint is injected, otherwise region "eu01" is used.
// NOTE(review): unlike the sibling helpers, the custom-endpoint branch also
// pins default_region = "eu01" — confirm whether that is intentional.
func ScfProviderConfig() string {
	if ScfCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
default_region = "eu01"
scf_custom_endpoint = "%s"
}`, ScfCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
func ResourceNameWithDateTime(name string) string {
dateTime := time.Now().Format(time.RFC3339)
// Remove timezone to have a smaller datetime
dateTimeTrimmed, _, _ := strings.Cut(dateTime, "+")
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
}
// GetTestProjectServiceAccountJson returns the service account key JSON used
// by acceptance tests. The TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON env var
// takes precedence; otherwise the key is read from the file at path (or the
// default location when path is empty). Returns "" when neither source
// yields a value.
func GetTestProjectServiceAccountJson(path string) string {
	if fromEnv, ok := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON"); ok && fromEnv != "" {
		return fromEnv
	}
	fromFile, err := readTestServiceAccountJsonFromFile(path)
	if err != nil {
		// Best effort: a missing/unreadable file degrades to an empty key.
		return ""
	}
	return fromFile
}
//func GetTestProjectServiceAccountToken(path string) string {
// var err error
// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
// if !tokenSet || token == "" {
// token, err = readTestTokenFromCredentialsFile(path)
// if err != nil {
// return ""
// }
// }
// return token
//}
//
//func readTestTokenFromCredentialsFile(path string) (string, error) {
// if path == "" {
// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH")
// if !customPathSet || customPath == "" {
// path = credentialsFilePath
// home, err := os.UserHomeDir()
// if err != nil {
// return "", fmt.Errorf("getting home directory: %w", err)
// }
// path = filepath.Join(home, path)
// } else {
// path = customPath
// }
// }
//
// credentialsRaw, err := os.ReadFile(path)
// if err != nil {
// return "", fmt.Errorf("opening file: %w", err)
// }
//
// var credentials struct {
// TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"`
// }
// err = json.Unmarshal(credentialsRaw, &credentials)
// if err != nil {
// return "", fmt.Errorf("unmarshalling credentials: %w", err)
// }
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
//}
// readTestServiceAccountJsonFromFile loads the service account key JSON from
// disk. When path is empty it is resolved from the STACKIT_SERVICE_ACCOUNT_PATH
// env var if set, otherwise from serviceAccountFilePath relative to the
// user's home directory.
func readTestServiceAccountJsonFromFile(path string) (string, error) {
	if path == "" {
		envPath, ok := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH")
		if ok && envPath != "" {
			path = envPath
		} else {
			home, err := os.UserHomeDir()
			if err != nil {
				return "", fmt.Errorf("getting home directory: %w", err)
			}
			path = filepath.Join(home, serviceAccountFilePath)
		}
	}
	raw, err := os.ReadFile(path)
	if err != nil {
		return "", fmt.Errorf("opening file: %w", err)
	}
	return string(raw), nil
}
func getenv(key, defaultValue string) string {
val := os.Getenv(key)
if val == "" {
return defaultValue
}
return val
}
// CreateDefaultLocalFile is a helper for local_file_path. No real data is created
func CreateDefaultLocalFile() os.File {
// Define the file name and size
fileName := "test-512k.img"
size := 512 * 1024 // 512 KB
// Create the file
file, err := os.Create(fileName)
if err != nil {
panic(err)
}
// Seek to the desired position (512 KB)
_, err = file.Seek(int64(size), 0)
if err != nil {
panic(err)
}
return *file
}
// ConvertConfigVariable renders a terraform-plugin-testing config.Variable as
// the raw string to embed in generated HCL.
//
// For string variables the surrounding JSON quotes are stripped and the
// escaped inner quotes (added by MarshalJSON) are restored; all other types
// (bool, integer, ...) are returned as their JSON text verbatim.
//
// Fix: the MarshalJSON error was previously discarded and the result indexed
// unconditionally, which would panic on an empty byte slice; both are now
// guarded (returning the raw text unchanged in that case).
func ConvertConfigVariable(variable config.Variable) string {
	tmpByteArray, err := variable.MarshalJSON()
	if err != nil || len(tmpByteArray) < 2 {
		return string(tmpByteArray)
	}
	if tmpByteArray[0] == '"' && tmpByteArray[len(tmpByteArray)-1] == '"' {
		result := string(tmpByteArray[1 : len(tmpByteArray)-1])
		// Undo the quote escaping that MarshalJSON added.
		return strings.ReplaceAll(result, `\"`, `"`)
	}
	return string(tmpByteArray)
}

View file

@ -0,0 +1,50 @@
// Copyright (c) STACKIT
package testutil
import (
"testing"
"github.com/hashicorp/terraform-plugin-testing/config"
)
// TestConvertConfigVariable verifies that ConvertConfigVariable renders each
// supported variable type as the expected raw string.
func TestConvertConfigVariable(t *testing.T) {
	cases := []struct {
		name     string
		variable config.Variable
		want     string
	}{
		{name: "string", variable: config.StringVariable("test"), want: "test"},
		{name: "bool: true", variable: config.BoolVariable(true), want: "true"},
		{name: "bool: false", variable: config.BoolVariable(false), want: "false"},
		{name: "integer", variable: config.IntegerVariable(10), want: "10"},
		{name: "quoted string", variable: config.StringVariable(`instance =~ ".*"`), want: `instance =~ ".*"`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := ConvertConfigVariable(tc.variable)
			if got != tc.want {
				t.Errorf("ConvertConfigVariable() = %v, want %v", got, tc.want)
			}
		})
	}
}

View file

@ -1,39 +0,0 @@
package testutils
import (
"fmt"
"net/http"
"path/filepath"
"regexp"
"runtime"
"strings"
"github.com/jarcoal/httpmock"
)
// TestName returns the bare name of the calling function, derived from the
// call stack: runtime.Caller(1) resolves the immediate caller's program
// counter, filepath.Ext keeps the last dot-separated segment of the fully
// qualified function name (e.g. ".TestFoo"), and the leading dot is trimmed.
// NOTE(review): the depth argument 1 is load-bearing — wrapping this helper
// in another function would report the wrapper's caller instead.
func TestName() string {
	pc, _, _, _ := runtime.Caller(1)
	nameFull := runtime.FuncForPC(pc).Name() // e.g. "pkg/path.TestFoo"
	nameEnd := filepath.Ext(nameFull)        // ".TestFoo"
	name := strings.TrimPrefix(nameEnd, ".")
	return name
}
// ActivateEnvironmentHttpMocks installs the httpmock responders shared by the
// environment tests:
//   - a catch-all no-responder that fails fast with a descriptive error for
//     any request no other mock matches, and
//   - a GET responder for the BAP environmentLanguages endpoint (europe or
//     unitedstates region) that replays a recorded JSON fixture.
func ActivateEnvironmentHttpMocks() {
	// Fail loudly on unmocked requests instead of hitting the network.
	httpmock.RegisterNoResponder(
		func(req *http.Request) (*http.Response, error) {
			return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
		},
	)
	// Serve the recorded languages fixture for either supported location.
	httpmock.RegisterRegexpResponder(
		"GET",
		regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
		func(_ *http.Request) (*http.Response, error) {
			return httpmock.NewStringResponse(
				http.StatusOK,
				httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String(),
			), nil
		},
	)
}

View file

@ -53,9 +53,9 @@ func CreateTemporaryHome(createValidCredentialsFile bool, t *testing.T) string {
// Define content, default = invalid token // Define content, default = invalid token
token := "foo_token" token := "foo_token"
//if createValidCredentialsFile { if createValidCredentialsFile {
// token = GetTestProjectServiceAccountJson("") token = GetTestProjectServiceAccountJson("")
//} }
if _, err = file.WriteString(token); err != nil { if _, err = file.WriteString(token); err != nil {
t.Fatalf("Error writing to file: %v", err) t.Fatalf("Error writing to file: %v", err)
} }

View file

@ -293,24 +293,25 @@ func RedisProviderConfig() string {
) )
} }
func ResourceManagerProviderConfig(saKeyPath string) string { func ResourceManagerProviderConfig() string {
key := GetTestProjectServiceAccountJson("")
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" { if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
return fmt.Sprintf(` return fmt.Sprintf(`
provider "stackitprivatepreview" { provider "stackitprivatepreview" {
service_account_key_path = "%s" service_account_key = "%s"
}`, }`,
saKeyPath, key,
) )
} }
return fmt.Sprintf(` return fmt.Sprintf(`
provider "stackitprivatepreview" { provider "stackitprivatepreview" {
resourcemanager_custom_endpoint = "%s" resourcemanager_custom_endpoint = "%s"
authorization_custom_endpoint = "%s" authorization_custom_endpoint = "%s"
service_account_key_path = "%s" service_account_token = "%s"
}`, }`,
ResourceManagerCustomEndpoint, ResourceManagerCustomEndpoint,
AuthorizationCustomEndpoint, AuthorizationCustomEndpoint,
saKeyPath, key,
) )
} }

View file

@ -6,6 +6,7 @@ import (
"log/slog" "log/slog"
"os" "os"
"os/exec" "os/exec"
"path/filepath"
"strings" "strings"
"time" "time"
@ -19,8 +20,9 @@ import (
) )
const ( const (
// Default location of service account JSON // Default location of credentials JSON
serviceAccountFilePath = "service_account.json" // credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive
serviceAccountFilePath = ".stackit/service_account.json"
) )
var ( var (
@ -99,17 +101,17 @@ func ResourceNameWithDateTime(name string) string {
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed) return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
} }
//func GetTestProjectServiceAccountJson(path string) string { func GetTestProjectServiceAccountJson(path string) string {
// var err error var err error
// json, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_JSON_CONTENT") token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON")
// if !ok || json == "" { if !tokenSet || token == "" {
// json, err = readTestServiceAccountJsonFromFile(path) token, err = readTestServiceAccountJsonFromFile(path)
// if err != nil { if err != nil {
// return "" return ""
// } }
// } }
// return json return token
//} }
// func GetTestProjectServiceAccountToken(path string) string { // func GetTestProjectServiceAccountToken(path string) string {
// var err error // var err error
@ -153,30 +155,27 @@ func ResourceNameWithDateTime(name string) string {
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil // return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
//} //}
//func readTestServiceAccountJsonFromFile(path string) (string, error) { func readTestServiceAccountJsonFromFile(path string) (string, error) {
// if path == "" { if path == "" {
// customPath, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_FILE") customPath, customPathSet := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH")
// if !ok || customPath == "" { if !customPathSet || customPath == "" {
// path = serviceAccountFilePath path = serviceAccountFilePath
// // TODO: check if we want to handle this with a home dir home, err := os.UserHomeDir()
// /* if err != nil {
// home, err := os.UserHomeDir() return "", fmt.Errorf("getting home directory: %w", err)
// if err != nil { }
// return "", fmt.Errorf("getting home directory: %w", err) path = filepath.Join(home, path)
// } } else {
// path = filepath.Join(home, path) path = customPath
// */ }
// } else { }
// path = customPath
// } credentialsRaw, err := os.ReadFile(path)
// } if err != nil {
// return "", fmt.Errorf("opening file: %w", err)
// credentialsRaw, err := os.ReadFile(path) }
// if err != nil { return string(credentialsRaw), nil
// return "", fmt.Errorf("opening file: %w", err) }
// }
// return string(credentialsRaw), nil
//}
func getenv(key, defaultValue string) string { func getenv(key, defaultValue string) string {
val := os.Getenv(key) val := os.Getenv(key)

View file

@ -1,5 +1,5 @@
data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" { data "stackitprivatepreview_sqlserverflexalpha_flavor" "sqlserver_flavor" {
project_id = var.project_id project_id = var.project_id
region = "eu01" region = "eu01"
cpu = 4 cpu = 4
@ -9,5 +9,5 @@ data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" {
} }
output "sqlserver_flavor" { output "sqlserver_flavor" {
value = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id value = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
} }

View file

@ -18,15 +18,15 @@
# value = stackit_kms_key.key.key_id # value = stackit_kms_key.key.key_id
# } # }
resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" { resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" {
project_id = var.project_id project_id = var.project_id
name = "msh-beta-sna-001" name = "msh-sna-001"
backup_schedule = "0 3 * * *" backup_schedule = "0 3 * * *"
retention_days = 31 retention_days = 31
flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = { storage = {
class = "premium-perf2-stackit" class = "premium-perf2-stackit"
size = 10 size = 50
} }
version = 2022 version = 2022
encryption = { encryption = {
@ -34,11 +34,9 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
#keyring_id = stackit_kms_keyring.keyring.keyring_id #keyring_id = stackit_kms_keyring.keyring.keyring_id
#key_version = 1 #key_version = 1
# key with scope public # key with scope public
# kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
kek_key_id = "c6878f92-ce55-4b79-8236-ba9d001d7967" # msh-k-001
# key_id = var.key_id # key_id = var.key_id
# kek_key_ring_id = var.keyring_id kek_key_ring_id = var.keyring_id
kek_key_ring_id = "0dea3f5f-9947-4dda-a9d3-18418832cefe" # msh-kr-sna01
kek_key_version = var.key_version kek_key_version = var.key_version
service_account = var.sa_email service_account = var.sa_email
} }
@ -48,16 +46,83 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
} }
} }
resource "stackitprivatepreview_sqlserverflexbeta_user" "betauser" { resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-101" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id name = "msh-sna-101"
username = "betauser" backup_schedule = "0 3 * * *"
roles = ["##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"] retention_days = 31
flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = {
class = "premium-perf2-stackit"
size = 50
}
version = 2022
encryption = {
#key_id = stackit_kms_key.key.key_id
#keyring_id = stackit_kms_keyring.keyring.keyring_id
#key_version = 1
# key with scope public
kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
# key_id = var.key_id
kek_key_ring_id = var.keyring_id
kek_key_version = var.key_version
service_account = var.sa_email
}
network = {
acl = ["0.0.0.0/0", "193.148.160.0/19"]
access_scope = "SNA"
}
} }
resource "stackitprivatepreview_sqlserverflexbeta_database" "betadb" { resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-nosna-001" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id name = "msh-nosna-001"
name = "mshtest002" backup_schedule = "0 3 * * *"
owner = stackitprivatepreview_sqlserverflexbeta_user.betauser.username retention_days = 31
flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = {
class = "premium-perf2-stackit"
size = 50
} }
version = 2022
# encryption = {
# #key_id = stackit_kms_key.key.key_id
# #keyring_id = stackit_kms_keyring.keyring.keyring_id
# #key_version = 1
# #key_id = var.key_id
# # key with scope public
# key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
# keyring_id = var.keyring_id
# key_version = var.key_version
# service_account = var.sa_email
# }
network = {
acl = ["0.0.0.0/0", "193.148.160.0/19"]
access_scope = "PUBLIC"
}
}
# data "stackitprivatepreview_sqlserverflexalpha_instance" "test" {
# project_id = var.project_id
# instance_id = var.instance_id
# region = "eu01"
# }
# output "test" {
# value = data.stackitprivatepreview_sqlserverflexalpha_instance.test
# }
# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbadminuser" {
# project_id = var.project_id
# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id
# username = var.db_admin_username
# roles = ["##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"]
# }
# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbuser" {
# project_id = var.project_id
# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id
# username = var.db_username
# roles = ["##STACKIT_LoginManager##"]
# }

19
scripts/lint-golangci-lint.sh Executable file
View file

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# This script lints the SDK modules and the internal examples
# Pre-requisites: golangci-lint
set -eo pipefail

ROOT_DIR=$(git rev-parse --show-toplevel)
GOLANG_CI_YAML_PATH="${ROOT_DIR}/golang-ci.yaml"
GOLANG_CI_ARGS="--allow-parallel-runners --timeout=5m --config=${GOLANG_CI_YAML_PATH}"

# Fail early with a clear message if the linter is missing
# (replaces the old "type -p ... then :" no-op branch).
if ! command -v golangci-lint >/dev/null 2>&1; then
    echo "golangci-lint not installed, unable to proceed."
    exit 1
fi

cd "${ROOT_DIR}"
# GOLANG_CI_ARGS is intentionally unquoted so the flags are word-split.
golangci-lint run ${GOLANG_CI_ARGS}

View file

@ -17,7 +17,11 @@ elif [ "$action" = "tools" ]; then
go mod download go mod download
go install golang.org/x/tools/cmd/goimports@v0.42.0 # go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.0
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
# go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.21.0
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
else else
echo "Invalid action: '$action', please use $0 help for help" echo "Invalid action: '$action', please use $0 help for help"
fi fi

View file

@ -14,5 +14,5 @@ fi
mkdir -p ${ROOT_DIR}/docs mkdir -p ${ROOT_DIR}/docs
echo ">> Generating documentation" echo ">> Generating documentation"
go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate \ tfplugindocs generate \
--provider-name "stackitprivatepreview" --provider-name "stackitprivatepreview"

View file

@ -1,3 +0,0 @@
versions:
- name: alpha
path: v3alpha1

View file

@ -1,5 +0,0 @@
versions:
- name: alpha
path: v3alpha1
- name: beta
path: v3beta1

View file

@ -32,7 +32,7 @@ const (
type EphemeralProviderData struct { type EphemeralProviderData struct {
ProviderData ProviderData
PrivateKey string //nolint:gosec //this is a placeholder and not used in this code PrivateKey string
PrivateKeyPath string PrivateKeyPath string
ServiceAccountKey string ServiceAccountKey string
ServiceAccountKeyPath string ServiceAccountKeyPath string
@ -105,13 +105,11 @@ func DiagsToError(diags diag.Diagnostics) error {
diagsError := diags.Errors() diagsError := diags.Errors()
diagsStrings := make([]string, 0) diagsStrings := make([]string, 0)
for _, diagnostic := range diagsError { for _, diagnostic := range diagsError {
diagsStrings = append( diagsStrings = append(diagsStrings, fmt.Sprintf(
diagsStrings, fmt.Sprintf(
"(%s) %s", "(%s) %s",
diagnostic.Summary(), diagnostic.Summary(),
diagnostic.Detail(), diagnostic.Detail(),
), ))
)
} }
return fmt.Errorf("%s", strings.Join(diagsStrings, ";")) return fmt.Errorf("%s", strings.Join(diagsStrings, ";"))
} }
@ -138,22 +136,14 @@ func LogAndAddWarning(ctx context.Context, diags *diag.Diagnostics, summary, det
func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) { func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name) warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name)
warnContent := fmt.Sprintf( warnContent := fmt.Sprintf("The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", resourceType, name)
"The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.",
resourceType,
name,
)
tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent)) tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent))
diags.AddWarning(warnTitle, warnContent) diags.AddWarning(warnTitle, warnContent)
} }
func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) { func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name) errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name)
errContent := fmt.Sprintf( errContent := fmt.Sprintf(`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, resourceType, name)
`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`,
resourceType,
name,
)
tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent)) tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent))
diags.AddError(errTitle, errContent) diags.AddError(errTitle, errContent)
} }
@ -171,10 +161,8 @@ func LogResponse(ctx context.Context) context.Context {
traceId := runtime.GetTraceId(ctx) traceId := runtime.GetTraceId(ctx)
ctx = tflog.SetField(ctx, "x-trace-id", traceId) ctx = tflog.SetField(ctx, "x-trace-id", traceId)
tflog.Info( tflog.Info(ctx, "response data", map[string]interface{}{
ctx, "response data", map[string]interface{}{
"x-trace-id": traceId, "x-trace-id": traceId,
}, })
)
return ctx return ctx
} }

View file

@ -98,7 +98,7 @@ func (rrt *RetryRoundTripper) retryLoop(
waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay) waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay)
if err := rrt.waitForDelay(ctx, waitDuration); err != nil { if err := rrt.waitForDelay(ctx, waitDuration); err != nil {
return nil, err // Context was canceled during wait. return nil, err // Context was cancelled during wait.
} }
// Exponential backoff for the next potential retry. // Exponential backoff for the next potential retry.
@ -153,6 +153,7 @@ func (rrt *RetryRoundTripper) handleFinalError(
) error { ) error {
if resp != nil { if resp != nil {
if err := resp.Body.Close(); err != nil { if err := resp.Body.Close(); err != nil {
tflog.Warn( tflog.Warn(
ctx, "Failed to close response body", map[string]interface{}{ ctx, "Failed to close response body", map[string]interface{}{
"error": err.Error(), "error": err.Error(),
@ -193,6 +194,7 @@ func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool {
} }
return false return false
} }
// calculateWaitDurationWithJitter calculates the backoff duration for the next retry, // calculateWaitDurationWithJitter calculates the backoff duration for the next retry,
@ -230,7 +232,7 @@ func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter(
func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error { func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error {
select { select {
case <-ctx.Done(): case <-ctx.Done():
return fmt.Errorf("context canceled during backoff wait: %w", ctx.Err()) return fmt.Errorf("context cancelled during backoff wait: %w", ctx.Err())
case <-time.After(delay): case <-time.After(delay):
return nil return nil
} }

View file

@ -72,7 +72,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
}, },
} }
tripper := testRetryConfig(mock) tripper := testRetryConfig(mock)
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) req := httptest.NewRequest(http.MethodGet, "/", nil)
resp, err := tripper.RoundTrip(req) resp, err := tripper.RoundTrip(req)
if resp != nil { if resp != nil {
@ -110,7 +110,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
}, },
} }
tripper := testRetryConfig(mock) tripper := testRetryConfig(mock)
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) req := httptest.NewRequest(http.MethodGet, "/", nil)
resp, err := tripper.RoundTrip(req) resp, err := tripper.RoundTrip(req)
if resp != nil { if resp != nil {
@ -155,7 +155,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
}, nil }, nil
} }
tripper := testRetryConfig(mock) tripper := testRetryConfig(mock)
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) req := httptest.NewRequest(http.MethodGet, "/", nil)
resp, err := tripper.RoundTrip(req) resp, err := tripper.RoundTrip(req)
if resp != nil { if resp != nil {
@ -185,12 +185,12 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
mockErr := errors.New("simulated network error") mockErr := errors.New("simulated network error")
mock := &mockRoundTripper{ mock := &mockRoundTripper{
roundTripFunc: func(_ *http.Request) (*http.Response, error) { roundTripFunc: func(req *http.Request) (*http.Response, error) {
return nil, mockErr return nil, mockErr
}, },
} }
tripper := testRetryConfig(mock) tripper := testRetryConfig(mock)
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody) req := httptest.NewRequest(http.MethodGet, "/", nil)
resp, err := tripper.RoundTrip(req) resp, err := tripper.RoundTrip(req)
if resp != nil { if resp != nil {
@ -211,7 +211,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
) )
t.Run( t.Run(
"should abort retries if the main context is canceled", func(t *testing.T) { "should abort retries if the main context is cancelled", func(t *testing.T) {
t.Parallel() t.Parallel()
mock := &mockRoundTripper{ mock := &mockRoundTripper{
@ -230,7 +230,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond) ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond)
defer cancel() defer cancel()
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody).WithContext(ctx) req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx)
resp, err := tripper.RoundTrip(req) resp, err := tripper.RoundTrip(req)
if resp != nil { if resp != nil {

View file

@ -11,13 +11,13 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
pgDsGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
// Ensure the implementation satisfies the expected interfaces. // Ensure the implementation satisfies the expected interfaces.
@ -32,13 +32,13 @@ func NewDatabaseDataSource() datasource.DataSource {
// dataSourceModel maps the data source schema data. // dataSourceModel maps the data source schema data.
type dataSourceModel struct { type dataSourceModel struct {
pgDsGen.DatabaseModel postgresflexalpha2.DatabaseModel
TerraformID types.String `tfsdk:"id"` TerraformID types.String `tfsdk:"id"`
} }
// databaseDataSource is the data source implementation. // databaseDataSource is the data source implementation.
type databaseDataSource struct { type databaseDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -73,7 +73,7 @@ func (r *databaseDataSource) Configure(
// Schema defines the schema for the data source. // Schema defines the schema for the data source.
func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
s := pgDsGen.DatabaseDataSourceSchema(ctx) s := postgresflexalpha2.DatabaseDataSourceSchema(ctx)
s.Attributes["id"] = schema.StringAttribute{ s.Attributes["id"] = schema.StringAttribute{
Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
"`database_id`\\\".\",", "`database_id`\\\".\",",
@ -144,7 +144,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
model *dataSourceModel, model *dataSourceModel,
projectId, region, instanceId string, projectId, region, instanceId string,
diags *diag.Diagnostics, diags *diag.Diagnostics,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflexalpha.ListDatabase, error) {
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown() isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
@ -159,12 +159,12 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
if isIdSet { if isIdSet {
databaseId := model.DatabaseId.ValueInt64() databaseId := model.DatabaseId.ValueInt64()
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId) return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
} }
databaseName := model.Name.ValueString() databaseName := model.Name.ValueString()
ctx = tflog.SetField(ctx, "name", databaseName) ctx = tflog.SetField(ctx, "name", databaseName)
return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName) return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
} }
// handleReadError centralizes API error handling for the Read operation. // handleReadError centralizes API error handling for the Read operation.

View file

@ -5,7 +5,7 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient. // databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
@ -15,7 +15,7 @@ type databaseClientReader interface {
projectId string, projectId string,
region string, region string,
instanceId string, instanceId string,
) v3alpha1api.ApiListDatabasesRequestRequest ) postgresflex.ApiListDatabasesRequestRequest
} }
// getDatabaseById gets a database by its ID. // getDatabaseById gets a database by its ID.
@ -24,9 +24,9 @@ func getDatabaseById(
client databaseClientReader, client databaseClientReader,
projectId, region, instanceId string, projectId, region, instanceId string,
databaseId int64, databaseId int64,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflex.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool { filter := func(db postgresflex.ListDatabase) bool {
return int64(db.Id) == databaseId return db.Id != nil && *db.Id == databaseId
} }
return getDatabase(ctx, client, projectId, region, instanceId, filter) return getDatabase(ctx, client, projectId, region, instanceId, filter)
} }
@ -36,9 +36,9 @@ func getDatabaseByName(
ctx context.Context, ctx context.Context,
client databaseClientReader, client databaseClientReader,
projectId, region, instanceId, databaseName string, projectId, region, instanceId, databaseName string,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflex.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool { filter := func(db postgresflex.ListDatabase) bool {
return db.Name == databaseName return db.Name != nil && *db.Name == databaseName
} }
return getDatabase(ctx, client, projectId, region, instanceId, filter) return getDatabase(ctx, client, projectId, region, instanceId, filter)
} }
@ -49,8 +49,8 @@ func getDatabase(
ctx context.Context, ctx context.Context,
client databaseClientReader, client databaseClientReader,
projectId, region, instanceId string, projectId, region, instanceId string,
filter func(db v3alpha1api.ListDatabase) bool, filter func(db postgresflex.ListDatabase) bool,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflex.ListDatabase, error) {
if projectId == "" || region == "" || instanceId == "" { if projectId == "" || region == "" || instanceId == "" {
return nil, fmt.Errorf("all parameters (project, region, instance) are required") return nil, fmt.Errorf("all parameters (project, region, instance) are required")
} }
@ -59,18 +59,18 @@ func getDatabase(
for page := int32(1); ; page++ { for page := int32(1); ; page++ {
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId). res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute() Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute()
if err != nil { if err != nil {
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err) return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
} }
// If the API returns no databases, we have reached the end of the list. // If the API returns no databases, we have reached the end of the list.
if len(res.Databases) == 0 { if res.Databases == nil || len(*res.Databases) == 0 {
break break
} }
// Iterate over databases to find a match // Iterate over databases to find a match
for _, db := range res.Databases { for _, db := range *res.Databases {
if filter(db) { if filter(db) {
foundDb := db foundDb := db
return &foundDb, nil return &foundDb, nil
@ -82,6 +82,10 @@ func getDatabase(
} }
// cleanString removes leading and trailing quotes which are sometimes returned by the API. // cleanString removes leading and trailing quotes which are sometimes returned by the API.
func cleanString(s string) string { func cleanString(s *string) *string {
return strings.Trim(s, "\"") if s == nil {
return nil
}
res := strings.Trim(*s, "\"")
return &res
} }

View file

@ -5,99 +5,127 @@ import (
"testing" "testing"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
type mockRequest struct {
executeFunc func() (*postgresflex.ListDatabasesResponse, error)
}
func (m *mockRequest) Page(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Size(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest {
return m
}
func (m *mockRequest) Execute() (*postgresflex.ListDatabasesResponse, error) {
return m.executeFunc()
}
type mockDBClient struct {
executeRequest func() postgresflex.ApiListDatabasesRequestRequest
}
var _ databaseClientReader = (*mockDBClient)(nil)
func (m *mockDBClient) ListDatabasesRequest(
_ context.Context,
_, _, _ string,
) postgresflex.ApiListDatabasesRequestRequest {
return m.executeRequest()
}
func TestGetDatabase(t *testing.T) { func TestGetDatabase(t *testing.T) {
mockResp := func(page int32) (*v3alpha1api.ListDatabasesResponse, error) { mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
if page == 1 { if page == 1 {
return &v3alpha1api.ListDatabasesResponse{ return &postgresflex.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{ Databases: &[]postgresflex.ListDatabase{
{Id: int32(1), Name: "first"}, {Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
{Id: int32(2), Name: "second"}, {Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
}, },
Pagination: v3alpha1api.Pagination{ Pagination: &postgresflex.Pagination{
Page: int32(1), Page: utils.Ptr(int64(1)),
TotalPages: int32(2), TotalPages: utils.Ptr(int64(2)),
Size: int32(3), Size: utils.Ptr(int64(3)),
}, },
}, nil }, nil
} }
if page == 2 { if page == 2 {
return &v3alpha1api.ListDatabasesResponse{ return &postgresflex.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{{Id: int32(3), Name: "three"}}, Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
Pagination: v3alpha1api.Pagination{ Pagination: &postgresflex.Pagination{
Page: int32(2), Page: utils.Ptr(int64(2)),
TotalPages: int32(2), TotalPages: utils.Ptr(int64(2)),
Size: int32(3), Size: utils.Ptr(int64(3)),
}, },
}, nil }, nil
} }
return &v3alpha1api.ListDatabasesResponse{ return &postgresflex.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{}, Databases: &[]postgresflex.ListDatabase{},
Pagination: v3alpha1api.Pagination{ Pagination: &postgresflex.Pagination{
Page: int32(3), Page: utils.Ptr(int64(3)),
TotalPages: int32(2), TotalPages: utils.Ptr(int64(2)),
Size: int32(3), Size: utils.Ptr(int64(3)),
}, },
}, nil }, nil
} }
tests := []struct { tests := []struct {
description string description string
projectID string projectId string
region string region string
instanceID string instanceId string
wantErr bool wantErr bool
wantDbName string wantDbName string
wantDbID int32 wantDbId int64
}{ }{
{ {
description: "Success - Found by name on first page", description: "Success - Found by name on first page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbName: "second", wantDbName: "second",
}, },
{ {
description: "Success - Found by id on first page", description: "Success - Found by id on first page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbID: 2, wantDbId: 2,
}, },
{ {
description: "Success - Found by name on second page", description: "Success - Found by name on second page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbName: "three", wantDbName: "three",
}, },
{ {
description: "Success - Found by id on second page", description: "Success - Found by id on second page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbID: 1, wantDbId: 1,
}, },
{ {
description: "Error - API failure", description: "Error - API failure",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: true, wantErr: true,
}, },
{ {
description: "Error - Missing parameters", description: "Error - Missing parameters",
projectID: "", region: "reg", instanceID: "inst", projectId: "", region: "reg", instanceId: "inst",
wantErr: true, wantErr: true,
}, },
{ {
description: "Error - Search by name not found after all pages", description: "Error - Search by name not found after all pages",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantDbName: "non-existent", wantDbName: "non-existent",
wantErr: true, wantErr: true,
}, },
{ {
description: "Error - Search by id not found after all pages", description: "Error - Search by id not found after all pages",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantDbID: 999999, wantDbId: 999999,
wantErr: true, wantErr: true,
}, },
} }
@ -105,46 +133,47 @@ func TestGetDatabase(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run( t.Run(
tt.description, func(t *testing.T) { tt.description, func(t *testing.T) {
var currentPage int32 var currentPage int64
client := &mockDBClient{
mockCall := func(_ v3alpha1api.ApiListDatabasesRequestRequest) (*v3alpha1api.ListDatabasesResponse, error) { executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
return &mockRequest{
executeFunc: func() (*postgresflex.ListDatabasesResponse, error) {
currentPage++ currentPage++
return mockResp(currentPage) return mockResp(currentPage)
},
}
},
} }
client := &v3alpha1api.DefaultAPIServiceMock{ var actual *postgresflex.ListDatabase
ListDatabasesRequestExecuteMock: &mockCall,
}
var actual *v3alpha1api.ListDatabase
var errDB error var errDB error
if tt.wantDbName != "" { if tt.wantDbName != "" {
actual, errDB = getDatabaseByName( actual, errDB = getDatabaseByName(
t.Context(), t.Context(),
client, client,
tt.projectID, tt.projectId,
tt.region, tt.region,
tt.instanceID, tt.instanceId,
tt.wantDbName, tt.wantDbName,
) )
} else if tt.wantDbID != 0 { } else if tt.wantDbId != 0 {
actual, errDB = getDatabaseById( actual, errDB = getDatabaseById(
t.Context(), t.Context(),
client, client,
tt.projectID, tt.projectId,
tt.region, tt.region,
tt.instanceID, tt.instanceId,
int64(tt.wantDbID), tt.wantDbId,
) )
} else { } else {
actual, errDB = getDatabase( actual, errDB = getDatabase(
context.Background(), context.Background(),
client, client,
tt.projectID, tt.projectId,
tt.region, tt.region,
tt.instanceID, tt.instanceId,
func(_ v3alpha1api.ListDatabase) bool { return false }, func(_ postgresflex.ListDatabase) bool { return false },
) )
} }
@ -153,14 +182,14 @@ func TestGetDatabase(t *testing.T) {
return return
} }
if !tt.wantErr && tt.wantDbName != "" && actual != nil { if !tt.wantErr && tt.wantDbName != "" && actual != nil {
if actual.Name != tt.wantDbName { if *actual.Name != tt.wantDbName {
t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", actual.Name, tt.wantDbName) t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", *actual.Name, tt.wantDbName)
} }
} }
if !tt.wantErr && tt.wantDbID != 0 && actual != nil { if !tt.wantErr && tt.wantDbId != 0 && actual != nil {
if actual.Id != tt.wantDbID { if *actual.Id != tt.wantDbId {
t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", actual.Id, tt.wantDbID) t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", *actual.Id, tt.wantDbId)
} }
} }
}, },
@ -171,18 +200,23 @@ func TestGetDatabase(t *testing.T) {
func TestCleanString(t *testing.T) { func TestCleanString(t *testing.T) {
testcases := []struct { testcases := []struct {
name string name string
given string given *string
expected string expected *string
}{ }{
{ {
name: "should remove quotes", name: "should remove quotes",
given: "\"quoted\"", given: utils.Ptr("\"quoted\""),
expected: "quoted", expected: utils.Ptr("quoted"),
},
{
name: "should handle nil",
given: nil,
expected: nil,
}, },
{ {
name: "should not change unquoted string", name: "should not change unquoted string",
given: "unquoted", given: utils.Ptr("unquoted"),
expected: "unquoted", expected: utils.Ptr("unquoted"),
}, },
} }

View file

@ -5,21 +5,21 @@ import (
"strconv" "strconv"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
) )
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source. // mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
func mapFields( func mapFields(
source *v3alpha1api.ListDatabase, source *postgresflexalpha.ListDatabase,
model *dataSourceModel, model *dataSourceModel,
region string, region string,
) error { ) error {
if source == nil { if source == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
if source.Id == 0 { if source.Id == nil || *source.Id == 0 {
return fmt.Errorf("id not present") return fmt.Errorf("id not present")
} }
if model == nil { if model == nil {
@ -29,8 +29,8 @@ func mapFields(
var databaseId int64 var databaseId int64
if model.DatabaseId.ValueInt64() != 0 { if model.DatabaseId.ValueInt64() != 0 {
databaseId = model.DatabaseId.ValueInt64() databaseId = model.DatabaseId.ValueInt64()
} else if source.Id != 0 { } else if source.Id != nil {
databaseId = int64(source.Id) databaseId = *source.Id
} else { } else {
return fmt.Errorf("database id not present") return fmt.Errorf("database id not present")
} }
@ -38,7 +38,7 @@ func mapFields(
model.Id = types.Int64Value(databaseId) model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId) model.DatabaseId = types.Int64Value(databaseId)
model.Name = types.StringValue(source.GetName()) model.Name = types.StringValue(source.GetName())
model.Owner = types.StringValue(cleanString(source.Owner)) model.Owner = types.StringPointerValue(cleanString(source.Owner))
model.Region = types.StringValue(region) model.Region = types.StringValue(region)
model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.ProjectId = types.StringValue(model.ProjectId.ValueString())
model.InstanceId = types.StringValue(model.InstanceId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString())
@ -53,11 +53,11 @@ func mapFields(
} }
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource. // mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error { func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *resourceModel) error {
if source == nil { if source == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
if source.Id == 0 { if source.Id == nil || *source.Id == 0 {
return fmt.Errorf("id not present") return fmt.Errorf("id not present")
} }
if model == nil { if model == nil {
@ -67,8 +67,8 @@ func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceM
var databaseId int64 var databaseId int64
if model.Id.ValueInt64() != 0 { if model.Id.ValueInt64() != 0 {
databaseId = model.Id.ValueInt64() databaseId = model.Id.ValueInt64()
} else if source.Id != 0 { } else if source.Id != nil {
databaseId = int64(source.Id) databaseId = *source.Id
} else { } else {
return fmt.Errorf("database id not present") return fmt.Errorf("database id not present")
} }
@ -76,18 +76,18 @@ func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceM
model.Id = types.Int64Value(databaseId) model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId) model.DatabaseId = types.Int64Value(databaseId)
model.Name = types.StringValue(source.GetName()) model.Name = types.StringValue(source.GetName())
model.Owner = types.StringValue(cleanString(source.Owner)) model.Owner = types.StringPointerValue(cleanString(source.Owner))
return nil return nil
} }
// toCreatePayload converts the resource model to an API create payload. // toCreatePayload converts the resource model to an API create payload.
func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) { func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
if model == nil { if model == nil {
return nil, fmt.Errorf("nil model") return nil, fmt.Errorf("nil model")
} }
return &v3alpha1api.CreateDatabaseRequestPayload{ return &postgresflexalpha.CreateDatabaseRequestPayload{
Name: model.Name.ValueString(), Name: model.Name.ValueStringPointer(),
Owner: model.Owner.ValueStringPointer(), Owner: model.Owner.ValueStringPointer(),
}, nil }, nil
} }

View file

@ -7,8 +7,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils" "github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
) )
@ -32,9 +31,9 @@ func TestMapFields(t *testing.T) {
name: "should map fields correctly", name: "should map fields correctly",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{ source: &postgresflexalpha.ListDatabase{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: "my-owner", Owner: utils.Ptr("\"my-owner\""),
}, },
model: &dataSourceModel{ model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
@ -63,8 +62,8 @@ func TestMapFields(t *testing.T) {
name: "should preserve existing model ID", name: "should preserve existing model ID",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{ source: &postgresflexalpha.ListDatabase{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "my-db", Name: utils.Ptr("my-db"),
}, },
model: &dataSourceModel{ model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
@ -80,8 +79,7 @@ func TestMapFields(t *testing.T) {
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
Id: types.Int64Value(1), Id: types.Int64Value(1),
Name: types.StringValue("my-db"), Name: types.StringValue("my-db"),
Owner: types.StringValue(""), Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
DatabaseId: types.Int64Value(1),
Region: types.StringValue("eu01"), Region: types.StringValue("eu01"),
InstanceId: types.StringValue("my-instance"), InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"), ProjectId: types.StringValue("my-project"),
@ -101,7 +99,7 @@ func TestMapFields(t *testing.T) {
{ {
name: "should fail on nil source ID", name: "should fail on nil source ID",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{Id: 0}, source: &postgresflexalpha.ListDatabase{Id: nil},
model: &dataSourceModel{}, model: &dataSourceModel{},
}, },
expected: expected{err: true}, expected: expected{err: true},
@ -109,7 +107,7 @@ func TestMapFields(t *testing.T) {
{ {
name: "should fail on nil model", name: "should fail on nil model",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{Id: int32(1)}, source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
model: nil, model: nil,
}, },
expected: expected{err: true}, expected: expected{err: true},
@ -152,9 +150,9 @@ func TestMapResourceFields(t *testing.T) {
name: "should map fields correctly", name: "should map fields correctly",
given: given{ given: given{
source: &postgresflexalpha.GetDatabaseResponse{ source: &postgresflexalpha.GetDatabaseResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: "my-owner", Owner: utils.Ptr("my-owner"),
}, },
model: &resourceModel{}, model: &resourceModel{},
}, },
@ -218,7 +216,7 @@ func TestToCreatePayload(t *testing.T) {
}, },
expected: expected{ expected: expected{
payload: &postgresflexalpha.CreateDatabaseRequestPayload{ payload: &postgresflexalpha.CreateDatabaseRequestPayload{
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: utils.Ptr("my-owner"), Owner: utils.Ptr("my-owner"),
}, },
}, },

View file

@ -14,14 +14,14 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha" postgresflexalpha3 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
) )
var ( var (
@ -43,7 +43,7 @@ func NewDatabaseResource() resource.Resource {
} }
// resourceModel describes the resource data model. // resourceModel describes the resource data model.
type resourceModel = postgresflexalphaResGen.DatabaseModel type resourceModel = postgresflexalpha2.DatabaseModel
// DatabaseResourceIdentityModel describes the resource's identity attributes. // DatabaseResourceIdentityModel describes the resource's identity attributes.
type DatabaseResourceIdentityModel struct { type DatabaseResourceIdentityModel struct {
@ -55,7 +55,7 @@ type DatabaseResourceIdentityModel struct {
// databaseResource is the resource implementation. // databaseResource is the resource implementation.
type databaseResource struct { type databaseResource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -122,7 +122,7 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource. // Schema defines the schema for the resource.
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
s := postgresflexalphaResGen.DatabaseResourceSchema(ctx) s := postgresflexalpha2.DatabaseResourceSchema(ctx)
fields, err := utils.ReadModifiersConfig(modifiersFileByte) fields, err := utils.ReadModifiersConfig(modifiersFileByte)
if err != nil { if err != nil {
@ -198,7 +198,7 @@ func (r *databaseResource) Create(
return return
} }
// Create new database // Create new database
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest( databaseResp, err := r.client.CreateDatabaseRequest(
ctx, ctx,
projectId, projectId,
region, region,
@ -209,17 +209,16 @@ func (r *databaseResource) Create(
return return
} }
dbID, ok := databaseResp.GetIdOk() if databaseResp == nil || databaseResp.Id == nil {
if !ok {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
funcErrorSummary, funcErrorSummary,
"API didn't return database Id. A database might although have been created", "API didn't return database Id. A database might have been created",
) )
return return
} }
databaseId := int64(*dbID) databaseId := *databaseResp.Id
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
@ -235,7 +234,7 @@ func (r *databaseResource) Create(
return return
} }
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId). database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute). SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second). SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx) WaitWithContext(ctx)
@ -294,7 +293,7 @@ func (r *databaseResource) Read(
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId). databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
SetTimeout(15 * time.Minute). SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second). SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx) WaitWithContext(ctx)
@ -322,12 +321,13 @@ func (r *databaseResource) Read(
return return
} }
// TODO: use values from api to identify drift
// Save identity into Terraform state // Save identity into Terraform state
identity := DatabaseResourceIdentityModel{ identity := DatabaseResourceIdentityModel{
ProjectID: types.StringValue(projectId), ProjectID: types.StringValue(projectId),
Region: types.StringValue(region), Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId), InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(int64(databaseResp.GetId())), DatabaseID: types.Int64Value(databaseId),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
@ -356,12 +356,30 @@ func (r *databaseResource) Update(
return return
} }
// Read identity data
var identityData DatabaseResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString() projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData)
instanceId := model.InstanceId.ValueString() if errExt != nil {
region := model.Region.ValueString() core.LogAndAddError(
databaseId := model.DatabaseId.ValueInt64() ctx,
&resp.Diagnostics,
extractErrorSummary,
fmt.Sprintf(extractErrorMessage, errExt),
)
}
if databaseId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return
}
databaseId := int32(databaseId64) // nolint:gosec // check is performed above
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
@ -377,7 +395,7 @@ func (r *databaseResource) Update(
} }
modified := false modified := false
var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
if stateModel.Name != model.Name { if stateModel.Name != model.Name {
payload.Name = model.Name.ValueStringPointer() payload.Name = model.Name.ValueStringPointer()
modified = true modified = true
@ -393,18 +411,13 @@ func (r *databaseResource) Update(
return return
} }
if databaseId > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", "databaseID out of bounds for int32")
return
}
databaseID32 := int32(databaseId) //nolint:gosec // TODO
// Update existing database // Update existing database
err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest( err := r.client.UpdateDatabasePartiallyRequest(
ctx, ctx,
projectId, projectId,
region, region,
instanceId, instanceId,
databaseID32, databaseId,
).UpdateDatabasePartiallyRequestPayload(payload).Execute() ).UpdateDatabasePartiallyRequestPayload(payload).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error()) core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
@ -413,7 +426,7 @@ func (r *databaseResource) Update(
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId). databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId64).
SetTimeout(15 * time.Minute). SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second). SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx) WaitWithContext(ctx)
@ -441,7 +454,7 @@ func (r *databaseResource) Update(
ProjectID: types.StringValue(projectId), ProjectID: types.StringValue(projectId),
Region: types.StringValue(region), Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId), InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(databaseId), DatabaseID: types.Int64Value(databaseId64),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
@ -499,7 +512,7 @@ func (r *databaseResource) Delete(
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
// Delete existing record set // Delete existing record set
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute() err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
} }

View file

@ -1,4 +1,4 @@
package postgresflexalphaflavor package postgresFlexAlphaFlavor
import ( import (
"context" "context"
@ -8,8 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen" postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -30,13 +30,13 @@ type FlavorModel struct {
ProjectId types.String `tfsdk:"project_id"` ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"` Region types.String `tfsdk:"region"`
StorageClass types.String `tfsdk:"storage_class"` StorageClass types.String `tfsdk:"storage_class"`
Cpu types.Int32 `tfsdk:"cpu"` Cpu types.Int64 `tfsdk:"cpu"`
Description types.String `tfsdk:"description"` Description types.String `tfsdk:"description"`
Id types.String `tfsdk:"id"` Id types.String `tfsdk:"id"`
FlavorId types.String `tfsdk:"flavor_id"` FlavorId types.String `tfsdk:"flavor_id"`
MaxGb types.Int32 `tfsdk:"max_gb"` MaxGb types.Int64 `tfsdk:"max_gb"`
Memory types.Int32 `tfsdk:"ram"` Memory types.Int64 `tfsdk:"ram"`
MinGb types.Int32 `tfsdk:"min_gb"` MinGb types.Int64 `tfsdk:"min_gb"`
NodeType types.String `tfsdk:"node_type"` NodeType types.String `tfsdk:"node_type"`
StorageClasses types.List `tfsdk:"storage_classes"` StorageClasses types.List `tfsdk:"storage_classes"`
} }
@ -48,7 +48,7 @@ func NewFlavorDataSource() datasource.DataSource {
// flavorDataSource is the data source implementation. // flavorDataSource is the data source implementation.
type flavorDataSource struct { type flavorDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -86,12 +86,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor description.", Description: "The flavor description.",
MarkdownDescription: "The flavor description.", MarkdownDescription: "The flavor description.",
}, },
"cpu": schema.Int32Attribute{ "cpu": schema.Int64Attribute{
Required: true, Required: true,
Description: "The cpu count of the instance.", Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.", MarkdownDescription: "The cpu count of the instance.",
}, },
"ram": schema.Int32Attribute{ "ram": schema.Int64Attribute{
Required: true, Required: true,
Description: "The memory of the instance in Gibibyte.", Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.", MarkdownDescription: "The memory of the instance in Gibibyte.",
@ -116,12 +116,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor id of the instance flavor.", Description: "The flavor id of the instance flavor.",
MarkdownDescription: "The flavor id of the instance flavor.", MarkdownDescription: "The flavor id of the instance flavor.",
}, },
"max_gb": schema.Int32Attribute{ "max_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.", Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
}, },
"min_gb": schema.Int32Attribute{ "min_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "minimum storage which is required to order in Gigabyte.", Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.", MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -138,10 +138,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
"class": schema.StringAttribute{ "class": schema.StringAttribute{
Computed: true, Computed: true,
}, },
"max_io_per_sec": schema.Int32Attribute{ "max_io_per_sec": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"max_through_in_mb": schema.Int32Attribute{ "max_through_in_mb": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
}, },
@ -171,25 +171,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region) flavors, err := getAllFlavors(ctx, r.client, projectId, region)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return return
} }
var foundFlavors []v3alpha1api.ListFlavors var foundFlavors []postgresflexalpha.ListFlavors
for _, flavor := range flavors { for _, flavor := range flavors {
if model.Cpu.ValueInt32() != flavor.Cpu { if model.Cpu.ValueInt64() != *flavor.Cpu {
continue continue
} }
if model.Memory.ValueInt32() != flavor.Memory { if model.Memory.ValueInt64() != *flavor.Memory {
continue continue
} }
if model.NodeType.ValueString() != flavor.NodeType { if model.NodeType.ValueString() != *flavor.NodeType {
continue continue
} }
for _, sc := range flavor.StorageClasses { for _, sc := range *flavor.StorageClasses {
if model.StorageClass.ValueString() != sc.Class { if model.StorageClass.ValueString() != *sc.Class {
continue continue
} }
foundFlavors = append(foundFlavors, flavor) foundFlavors = append(foundFlavors, flavor)
@ -205,11 +205,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
} }
f := foundFlavors[0] f := foundFlavors[0]
model.Description = types.StringValue(f.Description) model.Description = types.StringValue(*f.Description)
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id) model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
model.FlavorId = types.StringValue(f.Id) model.FlavorId = types.StringValue(*f.Id)
model.MaxGb = types.Int32Value(f.MaxGB) model.MaxGb = types.Int64Value(*f.MaxGB)
model.MinGb = types.Int32Value(f.MinGB) model.MinGb = types.Int64Value(*f.MinGB)
if f.StorageClasses == nil { if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{ model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
@ -219,15 +219,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}) })
} else { } else {
var scList []attr.Value var scList []attr.Value
for _, sc := range f.StorageClasses { for _, sc := range *f.StorageClasses {
scList = append( scList = append(
scList, scList,
postgresflexalphaGen.NewStorageClassesValueMust( postgresflexalphaGen.NewStorageClassesValueMust(
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"class": types.StringValue(sc.Class), "class": types.StringValue(*sc.Class),
"max_io_per_sec": types.Int32Value(sc.MaxIoPerSec), "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
"max_through_in_mb": types.Int32Value(sc.MaxThroughInMb), "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
}, },
), ),
) )

View file

@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{ "flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{ NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"cpu": schema.Int32Attribute{ "cpu": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "The cpu count of the instance.", Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.", MarkdownDescription: "The cpu count of the instance.",
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.", Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.",
}, },
"max_gb": schema.Int32Attribute{ "max_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.", Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
}, },
"memory": schema.Int32Attribute{ "memory": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "The memory of the instance in Gibibyte.", Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.", MarkdownDescription: "The memory of the instance in Gibibyte.",
}, },
"min_gb": schema.Int32Attribute{ "min_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "minimum storage which is required to order in Gigabyte.", Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.", MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{ "class": schema.StringAttribute{
Computed: true, Computed: true,
}, },
"max_io_per_sec": schema.Int32Attribute{ "max_io_per_sec": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"max_through_in_mb": schema.Int32Attribute{ "max_through_in_mb": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
}, },
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.", Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.", MarkdownDescription: "List of flavors available for the project.",
}, },
"page": schema.Int32Attribute{ "page": schema.Int64Attribute{
Optional: true, Optional: true,
Computed: true, Computed: true,
Description: "Number of the page of items list to be returned.", Description: "Number of the page of items list to be returned.",
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
}, },
"pagination": schema.SingleNestedAttribute{ "pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"page": schema.Int32Attribute{ "page": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"size": schema.Int32Attribute{ "size": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"sort": schema.StringAttribute{ "sort": schema.StringAttribute{
Computed: true, Computed: true,
}, },
"total_pages": schema.Int32Attribute{ "total_pages": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"total_rows": schema.Int32Attribute{ "total_rows": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
}, },
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
), ),
}, },
}, },
"size": schema.Int32Attribute{ "size": schema.Int64Attribute{
Optional: true, Optional: true,
Computed: true, Computed: true,
Description: "Number of items to be returned on each page.", Description: "Number of items to be returned on each page.",
@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct { type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"` Flavors types.List `tfsdk:"flavors"`
Page types.Int32 `tfsdk:"page"` Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"` Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"` ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"` Region types.String `tfsdk:"region"`
Size types.Int32 `tfsdk:"size"` Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"` Sort types.String `tfsdk:"sort"`
} }
@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
cpuVal, ok := cpuAttribute.(basetypes.Int32Value) cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute)) fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
} }
descriptionAttribute, ok := attributes["description"] descriptionAttribute, ok := attributes["description"]
@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value) maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute)) fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
} }
memoryAttribute, ok := attributes["memory"] memoryAttribute, ok := attributes["memory"]
@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
memoryVal, ok := memoryAttribute.(basetypes.Int32Value) memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute)) fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
} }
minGbAttribute, ok := attributes["min_gb"] minGbAttribute, ok := attributes["min_gb"]
@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
minGbVal, ok := minGbAttribute.(basetypes.Int32Value) minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute)) fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
} }
nodeTypeAttribute, ok := attributes["node_type"] nodeTypeAttribute, ok := attributes["node_type"]
@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
cpuVal, ok := cpuAttribute.(basetypes.Int32Value) cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute)) fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
} }
descriptionAttribute, ok := attributes["description"] descriptionAttribute, ok := attributes["description"]
@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value) maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute)) fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
} }
memoryAttribute, ok := attributes["memory"] memoryAttribute, ok := attributes["memory"]
@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
memoryVal, ok := memoryAttribute.(basetypes.Int32Value) memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute)) fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
} }
minGbAttribute, ok := attributes["min_gb"] minGbAttribute, ok := attributes["min_gb"]
@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
minGbVal, ok := minGbAttribute.(basetypes.Int32Value) minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute)) fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
} }
nodeTypeAttribute, ok := attributes["node_type"] nodeTypeAttribute, ok := attributes["node_type"]
@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{} var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct { type FlavorsValue struct {
Cpu basetypes.Int32Value `tfsdk:"cpu"` Cpu basetypes.Int64Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"` Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"` Id basetypes.StringValue `tfsdk:"id"`
MaxGb basetypes.Int32Value `tfsdk:"max_gb"` MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
Memory basetypes.Int32Value `tfsdk:"memory"` Memory basetypes.Int64Value `tfsdk:"memory"`
MinGb basetypes.Int32Value `tfsdk:"min_gb"` MinGb basetypes.Int64Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"` NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"` StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState state attr.ValueState
@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value var val tftypes.Value
var err error var err error
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{ attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx), ElemType: StorageClassesValue{}.Type(ctx),
@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
} }
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
"cpu": basetypes.Int32Type{}, "cpu": basetypes.Int64Type{},
"description": basetypes.StringType{}, "description": basetypes.StringType{},
"id": basetypes.StringType{}, "id": basetypes.StringType{},
"max_gb": basetypes.Int32Type{}, "max_gb": basetypes.Int64Type{},
"memory": basetypes.Int32Type{}, "memory": basetypes.Int64Type{},
"min_gb": basetypes.Int32Type{}, "min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{}, "node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{ "storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx), ElemType: StorageClassesValue{}.Type(ctx),
@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"cpu": basetypes.Int32Type{}, "cpu": basetypes.Int64Type{},
"description": basetypes.StringType{}, "description": basetypes.StringType{},
"id": basetypes.StringType{}, "id": basetypes.StringType{},
"max_gb": basetypes.Int32Type{}, "max_gb": basetypes.Int64Type{},
"memory": basetypes.Int32Type{}, "memory": basetypes.Int64Type{},
"min_gb": basetypes.Int32Type{}, "min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{}, "node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{ "storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx), ElemType: StorageClassesValue{}.Type(ctx),
@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags return nil, diags
} }
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value) maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute)) fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
} }
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags return nil, diags
} }
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value) maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute)) fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags return NewStorageClassesValueUnknown(), diags
} }
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value) maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute)) fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
} }
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags return NewStorageClassesValueUnknown(), diags
} }
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value) maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute)) fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct { type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"` Class basetypes.StringValue `tfsdk:"class"`
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"` MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"` MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
state attr.ValueState state attr.ValueState
} }
@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes} objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{}, "class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int32Type{}, "max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int32Type{}, "max_through_in_mb": basetypes.Int64Type{},
} }
if v.IsNull() { if v.IsNull() {
@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"class": basetypes.StringType{}, "class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int32Type{}, "max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int32Type{}, "max_through_in_mb": basetypes.Int64Type{},
} }
} }
@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
pageVal, ok := pageAttribute.(basetypes.Int32Value) pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute)) fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
} }
sizeAttribute, ok := attributes["size"] sizeAttribute, ok := attributes["size"]
@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
sizeVal, ok := sizeAttribute.(basetypes.Int32Value) sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute)) fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
} }
sortAttribute, ok := attributes["sort"] sortAttribute, ok := attributes["sort"]
@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value) totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute)) fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
} }
totalRowsAttribute, ok := attributes["total_rows"] totalRowsAttribute, ok := attributes["total_rows"]
@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value) totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute)) fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
pageVal, ok := pageAttribute.(basetypes.Int32Value) pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute)) fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
} }
sizeAttribute, ok := attributes["size"] sizeAttribute, ok := attributes["size"]
@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
sizeVal, ok := sizeAttribute.(basetypes.Int32Value) sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute)) fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
} }
sortAttribute, ok := attributes["sort"] sortAttribute, ok := attributes["sort"]
@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value) totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute)) fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
} }
totalRowsAttribute, ok := attributes["total_rows"] totalRowsAttribute, ok := attributes["total_rows"]
@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value) totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute)) fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{} var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct { type PaginationValue struct {
Page basetypes.Int32Value `tfsdk:"page"` Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"` Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"` Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"` TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"` TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState state attr.ValueState
} }
@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value var val tftypes.Value
var err error var err error
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes} objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
"page": basetypes.Int32Type{}, "page": basetypes.Int64Type{},
"size": basetypes.Int32Type{}, "size": basetypes.Int64Type{},
"sort": basetypes.StringType{}, "sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{}, "total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int32Type{}, "total_rows": basetypes.Int64Type{},
} }
if v.IsNull() { if v.IsNull() {
@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"page": basetypes.Int32Type{}, "page": basetypes.Int64Type{},
"size": basetypes.Int32Type{}, "size": basetypes.Int64Type{},
"sort": basetypes.StringType{}, "sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{}, "total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int32Type{}, "total_rows": basetypes.Int64Type{},
} }
} }

View file

@ -1,24 +1,24 @@
package postgresflexalphaflavor package postgresFlexAlphaFlavor
import ( import (
"context" "context"
"fmt" "fmt"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
type flavorsClientReader interface { type flavorsClientReader interface {
GetFlavorsRequest( GetFlavorsRequest(
ctx context.Context, ctx context.Context,
projectId, region string, projectId, region string,
) v3alpha1api.ApiGetFlavorsRequestRequest ) postgresflex.ApiGetFlavorsRequestRequest
} }
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) ( func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
[]v3alpha1api.ListFlavors, []postgresflex.ListFlavors,
error, error,
) { ) {
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true } getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter) flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
if err != nil { if err != nil {
return nil, err return nil, err
@ -32,29 +32,29 @@ func getFlavorsByFilter(
ctx context.Context, ctx context.Context,
client flavorsClientReader, client flavorsClientReader,
projectId, region string, projectId, region string,
filter func(db v3alpha1api.ListFlavors) bool, filter func(db postgresflex.ListFlavors) bool,
) ([]v3alpha1api.ListFlavors, error) { ) ([]postgresflex.ListFlavors, error) {
if projectId == "" || region == "" { if projectId == "" || region == "" {
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required") return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
} }
const pageSize = 25 const pageSize = 25
var result = make([]v3alpha1api.ListFlavors, 0) var result = make([]postgresflex.ListFlavors, 0)
for page := int32(1); ; page++ { for page := int32(1); ; page++ {
res, err := client.GetFlavorsRequest(ctx, projectId, region). res, err := client.GetFlavorsRequest(ctx, projectId, region).
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute() Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute()
if err != nil { if err != nil {
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err) return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
} }
// If the API returns no flavors, we have reached the end of the list. // If the API returns no flavors, we have reached the end of the list.
if len(res.Flavors) == 0 { if res.Flavors == nil || len(*res.Flavors) == 0 {
break break
} }
for _, flavor := range res.Flavors { for _, flavor := range *res.Flavors {
if filter(flavor) { if filter(flavor) {
result = append(result, flavor) result = append(result, flavor)
} }

View file

@ -1,11 +1,12 @@
package postgresflexalphaflavor package postgresFlexAlphaFlavor
/*
import ( import (
"context" "context"
"testing" "testing"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
type mockRequest struct { type mockRequest struct {
@ -29,25 +30,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
return m.executeRequest() return m.executeRequest()
} }
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) { var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
if page == 1 { if page == 1 {
return &postgresflex.GetFlavorsResponse{ return &postgresflex.GetFlavorsResponse{
Flavors: []postgresflex.ListFlavors{ Flavors: &[]postgresflex.ListFlavors{
{Id: "flavor-1", Description: "first"}, {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
{Id: "flavor-2", Description: "second"}, {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
}, },
}, nil }, nil
} }
if page == 2 { if page == 2 {
return &postgresflex.GetFlavorsResponse{ return &postgresflex.GetFlavorsResponse{
Flavors: []postgresflex.ListFlavors{ Flavors: &[]postgresflex.ListFlavors{
{Id: "flavor-3", Description: "three"}, {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
}, },
}, nil }, nil
} }
return &postgresflex.GetFlavorsResponse{ return &postgresflex.GetFlavorsResponse{
Flavors: []postgresflex.ListFlavors{}, Flavors: &[]postgresflex.ListFlavors{},
}, nil }, nil
} }
@ -71,7 +72,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
{ {
description: "Success - Filter flavors by description", description: "Success - Filter flavors by description",
projectId: "pid", region: "reg", projectId: "pid", region: "reg",
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" }, filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
wantCount: 1, wantCount: 1,
wantErr: false, wantErr: false,
}, },
@ -85,10 +86,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run( t.Run(
tt.description, func(t *testing.T) { tt.description, func(t *testing.T) {
var currentPage int32 var currentPage int64
client := &mockFlavorsClient{ client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest { executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
return mockRequest{ return &mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) { executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++ currentPage++
return mockResp(currentPage) return mockResp(currentPage)
@ -112,10 +113,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
} }
func TestGetAllFlavors(t *testing.T) { func TestGetAllFlavors(t *testing.T) {
var currentPage int32 var currentPage int64
client := &mockFlavorsClient{ client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest { executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
return mockRequest{ return &mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) { executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++ currentPage++
return mockResp(currentPage) return mockResp(currentPage)
@ -132,4 +133,3 @@ func TestGetAllFlavors(t *testing.T) {
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res)) t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
} }
} }
*/

View file

@ -5,8 +5,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen" postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
@ -26,7 +26,7 @@ func NewFlavorsDataSource() datasource.DataSource {
type dataSourceModel = postgresflexalphaGen.FlavorsModel type dataSourceModel = postgresflexalphaGen.FlavorsModel
type flavorsDataSource struct { type flavorsDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }

View file

@ -6,8 +6,8 @@ import (
"net/http" "net/http"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -37,7 +37,7 @@ type dataSourceModel struct {
// instanceDataSource is the data source implementation. // instanceDataSource is the data source implementation.
type instanceDataSource struct { type instanceDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -96,7 +96,7 @@ func (r *instanceDataSource) Read(
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil { if err != nil {
utils.LogError( utils.LogError(
ctx, ctx,

View file

@ -28,12 +28,10 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
}, },
"backup_schedule": schema.StringAttribute{ "backup_schedule": schema.StringAttribute{
Computed: true, Computed: true,
Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.", Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.", MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
}, },
"connection_info": schema.SingleNestedAttribute{ "connection_info": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"write": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"host": schema.StringAttribute{ "host": schema.StringAttribute{
Computed: true, Computed: true,
@ -46,24 +44,14 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
MarkdownDescription: "The port of the instance.", MarkdownDescription: "The port of the instance.",
}, },
}, },
CustomType: WriteType{
ObjectType: types.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
},
Computed: true,
Description: "The DNS name and port in the instance overview",
MarkdownDescription: "The DNS name and port in the instance overview",
},
},
CustomType: ConnectionInfoType{ CustomType: ConnectionInfoType{
ObjectType: types.ObjectType{ ObjectType: types.ObjectType{
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx), AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
}, },
}, },
Computed: true, Computed: true,
Description: "The connection information of the instance", Description: "The DNS name and port in the instance overview",
MarkdownDescription: "The connection information of the instance", MarkdownDescription: "The DNS name and port in the instance overview",
}, },
"encryption": schema.SingleNestedAttribute{ "encryption": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
@ -255,22 +243,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
attributes := in.Attributes() attributes := in.Attributes()
writeAttribute, ok := attributes["write"] hostAttribute, ok := attributes["host"]
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Missing", "Attribute Missing",
`write is missing from object`) `host is missing from object`)
return nil, diags return nil, diags
} }
writeVal, ok := writeAttribute.(basetypes.ObjectValue) hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -278,7 +284,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
} }
return ConnectionInfoValue{ return ConnectionInfoValue{
Write: writeVal, Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown, state: attr.ValueStateKnown,
}, diags }, diags
} }
@ -346,22 +353,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
return NewConnectionInfoValueUnknown(), diags return NewConnectionInfoValueUnknown(), diags
} }
writeAttribute, ok := attributes["write"] hostAttribute, ok := attributes["host"]
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Missing", "Attribute Missing",
`write is missing from object`) `host is missing from object`)
return NewConnectionInfoValueUnknown(), diags return NewConnectionInfoValueUnknown(), diags
} }
writeVal, ok := writeAttribute.(basetypes.ObjectValue) hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return NewConnectionInfoValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -369,7 +394,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
} }
return ConnectionInfoValue{ return ConnectionInfoValue{
Write: writeVal, Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown, state: attr.ValueStateKnown,
}, diags }, diags
} }
@ -442,401 +468,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = ConnectionInfoValue{} var _ basetypes.ObjectValuable = ConnectionInfoValue{}
type ConnectionInfoValue struct { type ConnectionInfoValue struct {
Write basetypes.ObjectValue `tfsdk:"write"`
state attr.ValueState
}
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 1)
var val tftypes.Value
var err error
attrTypes["write"] = basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
switch v.state {
case attr.ValueStateKnown:
vals := make(map[string]tftypes.Value, 1)
val, err = v.Write.ToTerraformValue(ctx)
if err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
vals["write"] = val
if err := tftypes.ValidateValue(objectType, vals); err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
return tftypes.NewValue(objectType, vals), nil
case attr.ValueStateNull:
return tftypes.NewValue(objectType, nil), nil
case attr.ValueStateUnknown:
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
default:
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
}
}
func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull
}
func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown
}
func (v ConnectionInfoValue) String() string {
return "ConnectionInfoValue"
}
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics
var write basetypes.ObjectValue
if v.Write.IsNull() {
write = types.ObjectNull(
WriteValue{}.AttributeTypes(ctx),
)
}
if v.Write.IsUnknown() {
write = types.ObjectUnknown(
WriteValue{}.AttributeTypes(ctx),
)
}
if !v.Write.IsNull() && !v.Write.IsUnknown() {
write = types.ObjectValueMust(
WriteValue{}.AttributeTypes(ctx),
v.Write.Attributes(),
)
}
attributeTypes := map[string]attr.Type{
"write": basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
}
if v.IsNull() {
return types.ObjectNull(attributeTypes), diags
}
if v.IsUnknown() {
return types.ObjectUnknown(attributeTypes), diags
}
objVal, diags := types.ObjectValue(
attributeTypes,
map[string]attr.Value{
"write": write,
})
return objVal, diags
}
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
other, ok := o.(ConnectionInfoValue)
if !ok {
return false
}
if v.state != other.state {
return false
}
if v.state != attr.ValueStateKnown {
return true
}
if !v.Write.Equal(other.Write) {
return false
}
return true
}
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
return ConnectionInfoType{
basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx),
},
}
}
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"write": basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
}
}
var _ basetypes.ObjectTypable = WriteType{}
type WriteType struct {
basetypes.ObjectType
}
func (t WriteType) Equal(o attr.Type) bool {
other, ok := o.(WriteType)
if !ok {
return false
}
return t.ObjectType.Equal(other.ObjectType)
}
func (t WriteType) String() string {
return "WriteType"
}
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
var diags diag.Diagnostics
attributes := in.Attributes()
hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
`host is missing from object`)
return nil, diags
}
hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
return nil, diags
}
return WriteValue{
Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
func NewWriteValueNull() WriteValue {
return WriteValue{
state: attr.ValueStateNull,
}
}
func NewWriteValueUnknown() WriteValue {
return WriteValue{
state: attr.ValueStateUnknown,
}
}
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
var diags diag.Diagnostics
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
ctx := context.Background()
for name, attributeType := range attributeTypes {
attribute, ok := attributes[name]
if !ok {
diags.AddError(
"Missing WriteValue Attribute Value",
"While creating a WriteValue value, a missing attribute value was detected. "+
"A WriteValue must contain values for all attributes, even if null or unknown. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
)
continue
}
if !attributeType.Equal(attribute.Type(ctx)) {
diags.AddError(
"Invalid WriteValue Attribute Type",
"While creating a WriteValue value, an invalid attribute value was detected. "+
"A WriteValue must use a matching attribute type for the value. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
)
}
}
for name := range attributes {
_, ok := attributeTypes[name]
if !ok {
diags.AddError(
"Extra WriteValue Attribute Value",
"While creating a WriteValue value, an extra attribute value was detected. "+
"A WriteValue must not contain values beyond the expected attribute types. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
)
}
}
if diags.HasError() {
return NewWriteValueUnknown(), diags
}
hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
`host is missing from object`)
return NewWriteValueUnknown(), diags
}
hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
return NewWriteValueUnknown(), diags
}
return WriteValue{
Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
object, diags := NewWriteValue(attributeTypes, attributes)
if diags.HasError() {
// This could potentially be added to the diag package.
diagsStrings := make([]string, 0, len(diags))
for _, diagnostic := range diags {
diagsStrings = append(diagsStrings, fmt.Sprintf(
"%s | %s | %s",
diagnostic.Severity(),
diagnostic.Summary(),
diagnostic.Detail()))
}
panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
}
return object
}
// ValueFromTerraform converts a raw tftypes.Value into the WriteValue
// attr.Value implementation, returning an error when the incoming value's
// Terraform type does not match this object type.
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
	// A nil type means there is no value information at all; treat as null.
	if in.Type() == nil {
		return NewWriteValueNull(), nil
	}
	if !in.Type().Equal(t.TerraformType(ctx)) {
		return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
	}
	if !in.IsKnown() {
		return NewWriteValueUnknown(), nil
	}
	if in.IsNull() {
		return NewWriteValueNull(), nil
	}
	// Decode the raw object into its per-attribute Terraform values.
	rawValues := map[string]tftypes.Value{}
	if err := in.As(&rawValues); err != nil {
		return nil, err
	}
	// Convert each attribute using its declared framework type.
	converted := make(map[string]attr.Value, len(rawValues))
	for name, rawValue := range rawValues {
		attrValue, err := t.AttrTypes[name].ValueFromTerraform(ctx, rawValue)
		if err != nil {
			return nil, err
		}
		converted[name] = attrValue
	}
	return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), converted), nil
}
// ValueType returns the attr.Value implementation associated with this type.
// The returned value is used only for its type information; its contents are
// never read.
func (t WriteType) ValueType(ctx context.Context) attr.Value {
	var zero WriteValue
	return zero
}
// Compile-time assertion that WriteValue implements basetypes.ObjectValuable.
var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"` Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"` Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState state attr.ValueState
} }
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 2) attrTypes := make(map[string]tftypes.Type, 2)
var val tftypes.Value var val tftypes.Value
@ -881,19 +518,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
} }
} }
func (v WriteValue) IsNull() bool { func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull return v.state == attr.ValueStateNull
} }
func (v WriteValue) IsUnknown() bool { func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown return v.state == attr.ValueStateUnknown
} }
func (v WriteValue) String() string { func (v ConnectionInfoValue) String() string {
return "WriteValue" return "ConnectionInfoValue"
} }
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
@ -919,8 +556,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
return objVal, diags return objVal, diags
} }
func (v WriteValue) Equal(o attr.Value) bool { func (v ConnectionInfoValue) Equal(o attr.Value) bool {
other, ok := o.(WriteValue) other, ok := o.(ConnectionInfoValue)
if !ok { if !ok {
return false return false
@ -945,15 +582,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
return true return true
} }
func (v WriteValue) Type(ctx context.Context) attr.Type { func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
return WriteType{ return ConnectionInfoType{
basetypes.ObjectType{ basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx), AttrTypes: v.AttributeTypes(ctx),
}, },
} }
} }
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"host": basetypes.StringType{}, "host": basetypes.StringType{},
"port": basetypes.Int64Type{}, "port": basetypes.Int64Type{},

View file

@ -7,8 +7,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen" postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
@ -33,7 +33,9 @@ func mapGetInstanceResponseToModel(
) )
} }
isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0 isConnectionInfoIncomplete := resp.ConnectionInfo == nil ||
resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" ||
resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0
if isConnectionInfoIncomplete { if isConnectionInfoIncomplete {
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull() m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
@ -41,19 +43,22 @@ func mapGetInstanceResponseToModel(
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust( m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx), postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
// careful - we can not use NewWriteValueMust here "host": types.StringPointerValue(resp.ConnectionInfo.Host),
"write": basetypes.NewObjectValueMust( "port": types.Int64PointerValue(resp.ConnectionInfo.Port),
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
// note: IDE does not show that port is actually an int64 in the Schema
"port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
},
),
}, },
) )
} }
m.ConnectionInfo.Host = types.StringValue("")
if host, ok := resp.ConnectionInfo.GetHostOk(); ok {
m.ConnectionInfo.Host = types.StringValue(host)
}
m.ConnectionInfo.Port = types.Int64Value(0)
if port, ok := resp.ConnectionInfo.GetPortOk(); ok {
m.ConnectionInfo.Port = types.Int64Value(port)
}
m.FlavorId = types.StringValue(resp.GetFlavorId()) m.FlavorId = types.StringValue(resp.GetFlavorId())
if m.Id.IsNull() || m.Id.IsUnknown() { if m.Id.IsNull() || m.Id.IsUnknown() {
m.Id = utils.BuildInternalTerraformId( m.Id = utils.BuildInternalTerraformId(
@ -62,7 +67,7 @@ func mapGetInstanceResponseToModel(
m.InstanceId.ValueString(), m.InstanceId.ValueString(),
) )
} }
m.InstanceId = types.StringValue(resp.Id) m.InstanceId = types.StringPointerValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
@ -75,12 +80,12 @@ func mapGetInstanceResponseToModel(
netInstAdd := types.StringValue("") netInstAdd := types.StringValue("")
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok { if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
netInstAdd = types.StringValue(*instAdd) netInstAdd = types.StringValue(instAdd)
} }
netRtrAdd := types.StringValue("") netRtrAdd := types.StringValue("")
if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok { if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok {
netRtrAdd = types.StringValue(*rtrAdd) netRtrAdd = types.StringValue(rtrAdd)
} }
net, diags := postgresflexalpharesource.NewNetworkValue( net, diags := postgresflexalpharesource.NewNetworkValue(
@ -98,7 +103,7 @@ func mapGetInstanceResponseToModel(
m.Network = net m.Network = net
m.Replicas = types.Int64Value(int64(resp.GetReplicas())) m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays())) m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
m.Name = types.StringValue(resp.GetName()) m.Name = types.StringValue(resp.GetName())
@ -108,7 +113,7 @@ func mapGetInstanceResponseToModel(
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx), postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()), "performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
"size": types.Int64Value(int64(resp.Storage.GetSize())), "size": types.Int64Value(resp.Storage.GetSize()),
}, },
) )
if diags.HasError() { if diags.HasError() {
@ -131,7 +136,7 @@ func mapGetDataInstanceResponseToModel(
m.FlavorId = types.StringValue(resp.GetFlavorId()) m.FlavorId = types.StringValue(resp.GetFlavorId())
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString()) m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
m.InstanceId = types.StringValue(resp.Id) m.InstanceId = types.StringPointerValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable()) m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
m.Name = types.StringValue(resp.GetName()) m.Name = types.StringValue(resp.GetName())
@ -141,13 +146,13 @@ func mapGetDataInstanceResponseToModel(
} }
m.Replicas = types.Int64Value(int64(resp.GetReplicas())) m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays())) m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
m.Status = types.StringValue(string(resp.GetStatus())) m.Status = types.StringValue(string(resp.GetStatus()))
storage, diags := postgresflexalphadatasource.NewStorageValue( storage, diags := postgresflexalphadatasource.NewStorageValue(
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx), postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()), "performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
"size": types.Int64Value(int64(resp.Storage.GetSize())), "size": types.Int64Value(resp.Storage.GetSize()),
}, },
) )
if diags.HasError() { if diags.HasError() {
@ -159,7 +164,9 @@ func mapGetDataInstanceResponseToModel(
} }
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) { func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0 isConnectionInfoIncomplete := resp.ConnectionInfo == nil ||
resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" ||
resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0
if isConnectionInfoIncomplete { if isConnectionInfoIncomplete {
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull() m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
@ -167,39 +174,34 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust( m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx), postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"write": types.ObjectValueMust( "host": types.StringPointerValue(resp.ConnectionInfo.Host),
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx), "port": types.Int64PointerValue(resp.ConnectionInfo.Port),
map[string]attr.Value{
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
"port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
},
),
}, },
) )
} }
} }
func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error { func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error {
netACL, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl()) netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
if diags.HasError() { if diags.HasError() {
return fmt.Errorf("failed converting network acl from response") return fmt.Errorf("failed converting network acl from response")
} }
instAddr := "" instAddr := ""
if iA, ok := resp.Network.GetInstanceAddressOk(); ok { if iA, ok := resp.Network.GetInstanceAddressOk(); ok {
instAddr = *iA instAddr = iA
} }
rtrAddr := "" rtrAddr := ""
if rA, ok := resp.Network.GetRouterAddressOk(); ok { if rA, ok := resp.Network.GetRouterAddressOk(); ok {
rtrAddr = *rA rtrAddr = rA
} }
net, diags := postgresflexalphadatasource.NewNetworkValue( net, diags := postgresflexalphadatasource.NewNetworkValue(
postgresflexalphadatasource.NetworkValue{}.AttributeTypes(ctx), postgresflexalphadatasource.NetworkValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"access_scope": types.StringValue(string(resp.Network.GetAccessScope())), "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
"acl": netACL, "acl": netAcl,
"instance_address": types.StringValue(instAddr), "instance_address": types.StringValue(instAddr),
"router_address": types.StringValue(rtrAddr), "router_address": types.StringValue(rtrAddr),
}, },
@ -214,22 +216,22 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) { func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
keyId := "" keyId := ""
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok { if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
keyId = *keyIdVal keyId = keyIdVal
} }
keyRingId := "" keyRingId := ""
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok { if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
keyRingId = *keyRingIdVal keyRingId = keyRingIdVal
} }
keyVersion := "" keyVersion := ""
if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok { if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
keyVersion = *keyVersionVal keyVersion = keyVersionVal
} }
svcAcc := "" svcAcc := ""
if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok { if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok {
svcAcc = *svcAccVal svcAcc = svcAccVal
} }
m.Encryption = postgresflexalphadatasource.EncryptionValue{ m.Encryption = postgresflexalphadatasource.EncryptionValue{

View file

@ -1,191 +1,746 @@
package postgresflexalpha package postgresflexalpha
import ( import (
"context" "github.com/stackitcloud/stackit-sdk-go/core/utils"
"testing"
"github.com/hashicorp/terraform-plugin-framework/types" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
) )
func Test_handleConnectionInfo(t *testing.T) { //nolint:unused // TODO: remove when used
type args struct { type testFlavor struct {
ctx context.Context Cpu int64
m *dataSourceModel Description string
hostName string Id string
port int32 MaxGB int64
Memory int64
MinGB int64
NodeType string
StorageClasses []testFlavorStorageClass
} }
tests := []struct {
name string //nolint:unused // TODO: remove when used
args args type testFlavorStorageClass struct {
}{ Class string
MaxIoPerSec int64
MaxThroughInMb int64
}
//nolint:unused // TODO: remove when used
var responseList = []testFlavor{
{ {
name: "empty connection info", Cpu: 1,
args: args{ Description: "flavor 1.1",
ctx: context.TODO(), Id: "flv1.1",
m: &dataSourceModel{}, MaxGB: 500,
hostName: "", Memory: 1,
port: 0, MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
}, },
}, },
{ {
name: "empty connection info host", Cpu: 1,
args: args{ Description: "flavor 1.2",
ctx: context.TODO(), Id: "flv1.2",
m: &dataSourceModel{}, MaxGB: 500,
hostName: "", Memory: 2,
port: 1234, MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
}, },
}, },
{ {
name: "empty connection info port", Cpu: 1,
args: args{ Description: "flavor 1.3",
ctx: context.TODO(), Id: "flv1.3",
m: &dataSourceModel{}, MaxGB: 500,
hostName: "hostname", Memory: 3,
port: 0, MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
}, },
}, },
{ {
name: "valid connection info", Cpu: 1,
args: args{ Description: "flavor 1.4",
ctx: context.TODO(), Id: "flv1.4",
m: &dataSourceModel{}, MaxGB: 500,
hostName: "host", Memory: 4,
port: 1000, MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
}, },
}, },
} {
for _, tt := range tests { Cpu: 1,
t.Run(tt.name, func(t *testing.T) { Description: "flavor 1.5",
resp := &postgresflex.GetInstanceResponse{ Id: "flv1.5",
ConnectionInfo: postgresflex.InstanceConnectionInfo{ MaxGB: 500,
Write: postgresflex.InstanceConnectionInfoWrite{ Memory: 5,
Host: tt.args.hostName, MinGB: 5,
Port: int32(tt.args.port), NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
}, },
}, },
{
Cpu: 1,
Description: "flavor 1.6",
Id: "flv1.6",
MaxGB: 500,
Memory: 6,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.7",
Id: "flv1.7",
MaxGB: 500,
Memory: 7,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.8",
Id: "flv1.8",
MaxGB: 500,
Memory: 8,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.9",
Id: "flv1.9",
MaxGB: 500,
Memory: 9,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
/* ......................................................... */
{
Cpu: 2,
Description: "flavor 2.1",
Id: "flv2.1",
MaxGB: 500,
Memory: 1,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.2",
Id: "flv2.2",
MaxGB: 500,
Memory: 2,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.3",
Id: "flv2.3",
MaxGB: 500,
Memory: 3,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.4",
Id: "flv2.4",
MaxGB: 500,
Memory: 4,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.5",
Id: "flv2.5",
MaxGB: 500,
Memory: 5,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.6",
Id: "flv2.6",
MaxGB: 500,
Memory: 6,
MinGB: 5,
NodeType: "single",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
/* ......................................................... */
{
Cpu: 1,
Description: "flavor 1.1 replica",
Id: "flv1.1r",
MaxGB: 500,
Memory: 1,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.2 replica",
Id: "flv1.2r",
MaxGB: 500,
Memory: 2,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.3 replica",
Id: "flv1.3r",
MaxGB: 500,
Memory: 3,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.4 replica",
Id: "flv1.4r",
MaxGB: 500,
Memory: 4,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.5 replica",
Id: "flv1.5r",
MaxGB: 500,
Memory: 5,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 1,
Description: "flavor 1.6 replica",
Id: "flv1.6r",
MaxGB: 500,
Memory: 6,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
/* ......................................................... */
{
Cpu: 2,
Description: "flavor 2.1 replica",
Id: "flv2.1r",
MaxGB: 500,
Memory: 1,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.2 replica",
Id: "flv2.2r",
MaxGB: 500,
Memory: 2,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.3 replica",
Id: "flv2.3r",
MaxGB: 500,
Memory: 3,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.4 replica",
Id: "flv2.4r",
MaxGB: 500,
Memory: 4,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.5 replica",
Id: "flv2.5r",
MaxGB: 500,
Memory: 5,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
{
Cpu: 2,
Description: "flavor 2.6 replica",
Id: "flv2.6r",
MaxGB: 500,
Memory: 6,
MinGB: 5,
NodeType: "Replica",
StorageClasses: []testFlavorStorageClass{
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
},
/* ......................................................... */
} }
handleConnectionInfo(tt.args.ctx, tt.args.m, resp) //nolint:unused // TODO: remove when used
func testFlavorListToResponseFlavorList(f []testFlavor) []postgresflex.ListFlavors {
result := make([]postgresflex.ListFlavors, len(f))
for i, flavor := range f {
result[i] = testFlavorToResponseFlavor(flavor)
}
return result
}
if tt.args.hostName == "" || tt.args.port == 0 { //nolint:unused // TODO: remove when used
if !tt.args.m.ConnectionInfo.IsNull() { func testFlavorToResponseFlavor(f testFlavor) postgresflex.ListFlavors {
t.Errorf("expected connection info to be null") var scList []postgresflex.FlavorStorageClassesStorageClass
for _, fl := range f.StorageClasses {
scList = append(
scList, postgresflex.FlavorStorageClassesStorageClass{
Class: utils.Ptr(fl.Class),
MaxIoPerSec: utils.Ptr(fl.MaxIoPerSec),
MaxThroughInMb: utils.Ptr(fl.MaxThroughInMb),
},
)
}
return postgresflex.ListFlavors{
Cpu: utils.Ptr(f.Cpu),
Description: utils.Ptr(f.Description),
Id: utils.Ptr(f.Id),
MaxGB: utils.Ptr(f.MaxGB),
Memory: utils.Ptr(f.Memory),
MinGB: utils.Ptr(f.MinGB),
NodeType: utils.Ptr(f.NodeType),
StorageClasses: &scList,
} }
} }
if tt.args.hostName != "" && tt.args.port != 0 { // func Test_getAllFlavors(t *testing.T) {
res := tt.args.m.ConnectionInfo.Write.Attributes() // type args struct {
gotHost := "" // projectId string
if r, ok := res["host"]; ok { // region string
gotHost = utils2.RemoveQuotes(r.String()) // }
} // tests := []struct {
if gotHost != tt.args.hostName { // name string
t.Errorf("host value incorrect: want: %s - got: %s", tt.args.hostName, gotHost) // args args
} // firstItem int
// lastItem int
// want []postgresflex.ListFlavors
// wantErr bool
// }{
// {
// name: "find exactly one flavor",
// args: args{
// projectId: "project",
// region: "region",
// },
// firstItem: 0,
// lastItem: 0,
// want: []postgresflex.ListFlavors{
// testFlavorToResponseFlavor(responseList[0]),
// },
// wantErr: false,
// },
// {
// name: "get exactly 1 page flavors",
// args: args{
// projectId: "project",
// region: "region",
// },
// firstItem: 0,
// lastItem: 9,
// want: testFlavorListToResponseFlavorList(responseList[0:10]),
// wantErr: false,
// },
// {
// name: "get exactly 20 flavors",
// args: args{
// projectId: "project",
// region: "region",
// },
// firstItem: 0,
// lastItem: 20,
// // 0 indexed therefore we want :21
// want: testFlavorListToResponseFlavorList(responseList[0:21]),
// wantErr: false,
// },
// {
// name: "get all flavors",
// args: args{
// projectId: "project",
// region: "region",
// },
// firstItem: 0,
// lastItem: len(responseList),
// want: testFlavorListToResponseFlavorList(responseList),
// wantErr: false,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// first := tt.firstItem
// if first > len(responseList)-1 {
// first = len(responseList) - 1
// }
// last := tt.lastItem
// if last > len(responseList)-1 {
// last = len(responseList) - 1
// }
// mockClient := postgresFlexClientMocked{
// returnError: tt.wantErr,
// firstItem: first,
// lastItem: last,
// }
// got, err := getAllFlavors(context.TODO(), mockClient, tt.args.projectId, tt.args.region)
// if (err != nil) != tt.wantErr {
// t.Errorf("getAllFlavors() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
//
// if diff := cmp.Diff(tt.want, got); diff != "" {
// t.Errorf("mismatch (-want +got):\n%s", diff)
// }
//
// if !reflect.DeepEqual(got, tt.want) {
// t.Errorf("getAllFlavors() got = %v, want %v", got, tt.want)
// }
// })
// }
//}
gotPort, ok := res["port"] // func Test_loadFlavorId(t *testing.T) {
if !ok { // type args struct {
t.Errorf("could not find a value for port in connection_info.write") // ctx context.Context
} // model *Model
if !gotPort.Equal(types.Int64Value(int64(tt.args.port))) { // storage *storageModel
t.Errorf("port value incorrect: want: %d - got: %s", tt.args.port, gotPort.String()) // }
} // tests := []struct {
} // name string
}) // args args
} // firstItem int
} // lastItem int
// want []postgresflex.ListFlavors
func Test_handleEncryption(t *testing.T) { // wantErr bool
t.Skipf("please implement") // }{
type args struct { // {
m *dataSourceModel // name: "find a single flavor",
resp *postgresflex.GetInstanceResponse // args: args{
} // ctx: context.Background(),
tests := []struct { // model: &Model{
name string // ProjectId: basetypes.NewStringValue("project"),
args args // Region: basetypes.NewStringValue("region"),
}{ // },
// TODO: Add test cases. // storage: &storageModel{
} // Class: basetypes.NewStringValue("sc1"),
for _, tt := range tests { // Size: basetypes.NewInt64Value(100),
t.Run(tt.name, func(t *testing.T) { // },
handleEncryption(tt.args.m, tt.args.resp) // },
t.Logf("need to implement more") // firstItem: 0,
}) // lastItem: 3,
} // want: []postgresflex.ListFlavors{
} // testFlavorToResponseFlavor(responseList[0]),
// },
func Test_handleNetwork(t *testing.T) { // wantErr: false,
t.Skipf("please implement") // },
type args struct { // {
ctx context.Context // name: "find a single flavor by replicas option",
m *dataSourceModel // args: args{
resp *postgresflex.GetInstanceResponse // ctx: context.Background(),
} // model: &Model{
tests := []struct { // ProjectId: basetypes.NewStringValue("project"),
name string // Region: basetypes.NewStringValue("region"),
args args // Replicas: basetypes.NewInt64Value(1),
wantErr bool // },
}{ // storage: &storageModel{
// TODO: Add test cases. // Class: basetypes.NewStringValue("sc1"),
} // Size: basetypes.NewInt64Value(100),
for _, tt := range tests { // },
t.Run(tt.name, func(t *testing.T) { // },
if err := handleNetwork(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr { // firstItem: 0,
t.Errorf("handleNetwork() error = %v, wantErr %v", err, tt.wantErr) // lastItem: 3,
} // want: []postgresflex.ListFlavors{
}) // testFlavorToResponseFlavor(responseList[0]),
} // },
} // wantErr: false,
// },
func Test_mapGetDataInstanceResponseToModel(t *testing.T) { // {
t.Skipf("please implement") // name: "fail finding find a single flavor by replicas option",
type args struct { // args: args{
ctx context.Context // ctx: context.Background(),
m *dataSourceModel // model: &Model{
resp *postgresflex.GetInstanceResponse // ProjectId: basetypes.NewStringValue("project"),
} // Region: basetypes.NewStringValue("region"),
tests := []struct { // Replicas: basetypes.NewInt64Value(1),
name string // },
args args // storage: &storageModel{
wantErr bool // Class: basetypes.NewStringValue("sc1"),
}{ // Size: basetypes.NewInt64Value(100),
// TODO: Add test cases. // },
} // },
for _, tt := range tests { // firstItem: 13,
t.Run(tt.name, func(t *testing.T) { // lastItem: 23,
if err := mapGetDataInstanceResponseToModel(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr { // want: []postgresflex.ListFlavors{},
t.Errorf("mapGetDataInstanceResponseToModel() error = %v, wantErr %v", err, tt.wantErr) // wantErr: true,
} // },
}) // {
} // name: "find a replicas flavor lower case",
} // args: args{
// ctx: context.Background(),
func Test_mapGetInstanceResponseToModel(t *testing.T) { // model: &Model{
t.Skipf("please implement") // ProjectId: basetypes.NewStringValue("project"),
type args struct { // Region: basetypes.NewStringValue("region"),
ctx context.Context // },
m *postgresflexalpharesource.InstanceModel // storage: &storageModel{
resp *postgresflex.GetInstanceResponse // Class: basetypes.NewStringValue("sc1"),
} // Size: basetypes.NewInt64Value(100),
tests := []struct { // },
name string // },
args args // firstItem: 0,
wantErr bool // lastItem: len(responseList) - 1,
}{ // want: []postgresflex.ListFlavors{
// TODO: Add test cases. // testFlavorToResponseFlavor(responseList[16]),
} // },
for _, tt := range tests { // wantErr: false,
t.Run(tt.name, func(t *testing.T) { // },
if err := mapGetInstanceResponseToModel(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr { // {
t.Errorf("mapGetInstanceResponseToModel() error = %v, wantErr %v", err, tt.wantErr) // name: "find a replicas flavor CamelCase",
} // args: args{
}) // ctx: context.Background(),
} // model: &Model{
} // ProjectId: basetypes.NewStringValue("project"),
// Region: basetypes.NewStringValue("region"),
// },
// storage: &storageModel{
// Class: basetypes.NewStringValue("sc1"),
// Size: basetypes.NewInt64Value(100),
// },
// },
// firstItem: 0,
// lastItem: len(responseList) - 1,
// want: []postgresflex.ListFlavors{
// testFlavorToResponseFlavor(responseList[16]),
// },
// wantErr: false,
// },
// {
// name: "find a replicas flavor by replicas option",
// args: args{
// ctx: context.Background(),
// model: &Model{
// ProjectId: basetypes.NewStringValue("project"),
// Region: basetypes.NewStringValue("region"),
// Replicas: basetypes.NewInt64Value(3),
// },
// flavor: &flavorModel{
// CPU: basetypes.NewInt64Value(1),
// RAM: basetypes.NewInt64Value(1),
// },
// storage: &storageModel{
// Class: basetypes.NewStringValue("sc1"),
// Size: basetypes.NewInt64Value(100),
// },
// },
// firstItem: 0,
// lastItem: len(responseList) - 1,
// want: []postgresflex.ListFlavors{
// testFlavorToResponseFlavor(responseList[16]),
// },
// wantErr: false,
// },
// {
// name: "fail finding a replica flavor",
// args: args{
// ctx: context.Background(),
// model: &Model{
// ProjectId: basetypes.NewStringValue("project"),
// Region: basetypes.NewStringValue("region"),
// Replicas: basetypes.NewInt64Value(3),
// },
// flavor: &flavorModel{
// CPU: basetypes.NewInt64Value(1),
// RAM: basetypes.NewInt64Value(1),
// },
// storage: &storageModel{
// Class: basetypes.NewStringValue("sc1"),
// Size: basetypes.NewInt64Value(100),
// },
// },
// firstItem: 0,
// lastItem: 10,
// want: []postgresflex.ListFlavors{},
// wantErr: true,
// },
// {
// name: "no flavor found error",
// args: args{
// ctx: context.Background(),
// model: &Model{
// ProjectId: basetypes.NewStringValue("project"),
// Region: basetypes.NewStringValue("region"),
// },
// flavor: &flavorModel{
// CPU: basetypes.NewInt64Value(10),
// RAM: basetypes.NewInt64Value(1000),
// NodeType: basetypes.NewStringValue("Single"),
// },
// storage: &storageModel{
// Class: basetypes.NewStringValue("sc1"),
// Size: basetypes.NewInt64Value(100),
// },
// },
// firstItem: 0,
// lastItem: 3,
// want: []postgresflex.ListFlavors{},
// wantErr: true,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// first := tt.firstItem
// if first > len(responseList)-1 {
// first = len(responseList) - 1
// }
// last := tt.lastItem
// if last > len(responseList)-1 {
// last = len(responseList) - 1
// }
// mockClient := postgresFlexClientMocked{
// returnError: tt.wantErr,
// firstItem: first,
// lastItem: last,
// }
// if err := loadFlavorId(tt.args.ctx, mockClient, tt.args.model, tt.args.flavor, tt.args.storage); (err != nil) != tt.wantErr {
// t.Errorf("loadFlavorId() error = %v, wantErr %v", err, tt.wantErr)
// }
// })
// }
//}

View file

@ -14,9 +14,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
@ -51,7 +50,7 @@ type InstanceResourceIdentityModel struct {
// instanceResource is the resource implementation. // instanceResource is the resource implementation.
type instanceResource struct { type instanceResource struct {
client *v3alpha1api.APIClient client *postgresflex.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -196,9 +195,9 @@ func (r *instanceResource) Create(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectID := model.ProjectId.ValueString() projectId := model.ProjectId.ValueString()
region := model.Region.ValueString() region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectID) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
var netAcl []string var netAcl []string
@ -208,13 +207,17 @@ func (r *instanceResource) Create(
return return
} }
replVal := model.Replicas.ValueInt64() // nolint:gosec // check is performed above if model.Replicas.ValueInt64() > math.MaxInt32 {
resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
return
}
replVal := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
payload := modelToCreateInstancePayload(netAcl, model, replVal) payload := modelToCreateInstancePayload(netAcl, model, replVal)
// Create new instance // Create new instance
createResp, err := r.client.DefaultAPI.CreateInstanceRequest( createResp, err := r.client.CreateInstanceRequest(
ctx, ctx,
projectID, projectId,
region, region,
).CreateInstanceRequestPayload(payload).Execute() ).CreateInstanceRequestPayload(payload).Execute()
if err != nil { if err != nil {
@ -223,7 +226,7 @@ func (r *instanceResource) Create(
} }
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
instanceID, ok := createResp.GetIdOk() instanceId, ok := createResp.GetIdOk()
if !ok { if !ok {
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", "could not find instance id in response") core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", "could not find instance id in response")
return return
@ -231,16 +234,16 @@ func (r *instanceResource) Create(
// Set data returned by API in identity // Set data returned by API in identity
identity := InstanceResourceIdentityModel{ identity := InstanceResourceIdentityModel{
ProjectID: types.StringValue(projectID), ProjectID: types.StringValue(projectId),
Region: types.StringValue(region), Region: types.StringValue(region),
InstanceID: types.StringPointerValue(instanceID), InstanceID: types.StringValue(instanceId),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
return return
} }
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectID, region, *instanceID). waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).
WaitWithContext(ctx) WaitWithContext(ctx)
if err != nil { if err != nil {
core.LogAndAddError( core.LogAndAddError(
@ -273,35 +276,37 @@ func (r *instanceResource) Create(
} }
func modelToCreateInstancePayload( func modelToCreateInstancePayload(
netACL []string, netAcl []string,
model postgresflexalpha.InstanceModel, model postgresflexalpha.InstanceModel,
replVal int64, replVal int32,
) v3alpha1api.CreateInstanceRequestPayload { ) postgresflex.CreateInstanceRequestPayload {
var enc *v3alpha1api.InstanceEncryption var enc *postgresflex.InstanceEncryption
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() { if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
enc = &v3alpha1api.InstanceEncryption{ enc = &postgresflex.InstanceEncryption{
KekKeyId: model.Encryption.KekKeyId.ValueString(), KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
KekKeyRingId: model.Encryption.KekKeyRingId.ValueString(), KekKeyRingId: model.Encryption.KekKeyRingId.ValueStringPointer(),
KekKeyVersion: model.Encryption.KekKeyVersion.ValueString(), KekKeyVersion: model.Encryption.KekKeyVersion.ValueStringPointer(),
ServiceAccount: model.Encryption.ServiceAccount.ValueString(), ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
} }
} }
payload := v3alpha1api.CreateInstanceRequestPayload{ payload := postgresflex.CreateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueString(), BackupSchedule: model.BackupSchedule.ValueStringPointer(),
Encryption: enc, Encryption: enc,
FlavorId: model.FlavorId.ValueString(), FlavorId: model.FlavorId.ValueStringPointer(),
Name: model.Name.ValueString(), Name: model.Name.ValueStringPointer(),
Network: v3alpha1api.InstanceNetworkCreate{ Network: &postgresflex.InstanceNetworkCreate{
AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer()), AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(
Acl: netACL, model.Network.AccessScope.ValueStringPointer(),
),
Acl: &netAcl,
}, },
Replicas: v3alpha1api.Replicas(replVal), //nolint:gosec // TODO Replicas: postgresflex.CreateInstanceRequestPayloadGetReplicasAttributeType(&replVal),
RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // TODO RetentionDays: model.RetentionDays.ValueInt64Pointer(),
Storage: v3alpha1api.StorageCreate{ Storage: &postgresflex.StorageCreate{
PerformanceClass: model.Storage.PerformanceClass.ValueString(), PerformanceClass: model.Storage.PerformanceClass.ValueStringPointer(),
Size: int32(model.Storage.Size.ValueInt64()), //nolint:gosec // TODO Size: model.Storage.Size.ValueInt64Pointer(),
}, },
Version: model.Version.ValueString(), Version: model.Version.ValueStringPointer(),
} }
return payload return payload
} }
@ -323,6 +328,10 @@ func (r *instanceResource) Read(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
// projectId := model.ProjectId.ValueString()
// region := r.providerData.GetRegionWithOverride(model.Region)
// instanceId := model.InstanceId.ValueString()
var projectId string var projectId string
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
projectId = model.ProjectId.ValueString() projectId = model.ProjectId.ValueString()
@ -342,7 +351,7 @@ func (r *instanceResource) Read(
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil { if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode == http.StatusNotFound { if ok && oapiErr.StatusCode == http.StatusNotFound {
@ -361,7 +370,7 @@ func (r *instanceResource) Read(
return return
} }
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() { if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
if *respInstanceID != instanceId { if respInstanceID != instanceId {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -426,56 +435,59 @@ func (r *instanceResource) Update(
return return
} }
projectID := identityData.ProjectID.ValueString() // if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() {
instanceID := identityData.InstanceID.ValueString() // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "instanceId is null or unknown")
// return
//}
//
// if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() {
// core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "projectId is null or unknown")
// return
//}
// projectId := model.ProjectId.ValueString()
// instanceId := model.InstanceId.ValueString()
projectId := identityData.ProjectID.ValueString()
instanceId := identityData.InstanceID.ValueString()
region := model.Region.ValueString() region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectID) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceID) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
var netACL []string var netAcl []string
diag := model.Network.Acl.ElementsAs(ctx, &netACL, false) diag := model.Network.Acl.ElementsAs(ctx, &netAcl, false)
resp.Diagnostics.Append(diags...) resp.Diagnostics.Append(diags...)
if diag.HasError() { if diag.HasError() {
return return
} }
if model.Replicas.ValueInt64() > math.MaxInt32 { if model.Replicas.ValueInt64() > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "replicas value too large for int32") resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
return return
} }
replInt32 := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
if model.RetentionDays.ValueInt64() > math.MaxInt32 { payload := postgresflex.UpdateInstanceRequestPayload{
core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "retention_days value too large for int32") BackupSchedule: model.BackupSchedule.ValueStringPointer(),
return FlavorId: model.FlavorId.ValueStringPointer(),
} Name: model.Name.ValueStringPointer(),
Network: &postgresflex.InstanceNetworkUpdate{
if model.Storage.Size.ValueInt64() > math.MaxInt32 { Acl: &netAcl,
core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "storage.size value too large for int32")
return
}
payload := v3alpha1api.UpdateInstanceRequestPayload{
BackupSchedule: model.BackupSchedule.ValueString(),
FlavorId: model.FlavorId.ValueString(),
Name: model.Name.ValueString(),
Network: v3alpha1api.InstanceNetworkUpdate{
Acl: netACL,
}, },
Replicas: v3alpha1api.Replicas(model.Replicas.ValueInt64()), //nolint:gosec // checked above Replicas: postgresflex.UpdateInstanceRequestPayloadGetReplicasAttributeType(&replInt32),
RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // checked above RetentionDays: model.RetentionDays.ValueInt64Pointer(),
Storage: v3alpha1api.StorageUpdate{ Storage: &postgresflex.StorageUpdate{
Size: coreUtils.Ptr(int32(model.Storage.Size.ValueInt64())), //nolint:gosec // checked above Size: model.Storage.Size.ValueInt64Pointer(),
}, },
Version: model.Version.ValueString(), Version: model.Version.ValueStringPointer(),
} }
// Update existing instance // Update existing instance
err := r.client.DefaultAPI.UpdateInstanceRequest( err := r.client.UpdateInstanceRequest(
ctx, ctx,
projectID, projectId,
region, region,
instanceID, instanceId,
).UpdateInstanceRequestPayload(payload).Execute() ).UpdateInstanceRequestPayload(payload).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error()) core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error())
@ -486,10 +498,10 @@ func (r *instanceResource) Update(
waitResp, err := wait.PartialUpdateInstanceWaitHandler( waitResp, err := wait.PartialUpdateInstanceWaitHandler(
ctx, ctx,
r.client.DefaultAPI, r.client,
projectID, projectId,
region, region,
instanceID, instanceId,
).WaitWithContext(ctx) ).WaitWithContext(ctx)
if err != nil { if err != nil {
core.LogAndAddError( core.LogAndAddError(
@ -544,7 +556,7 @@ func (r *instanceResource) Delete(
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
// Delete existing instance // Delete existing instance
err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute() err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
return return
@ -552,7 +564,7 @@ func (r *instanceResource) Delete(
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
_, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() _, err = r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil { if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode != http.StatusNotFound { if ok && oapiErr.StatusCode != http.StatusNotFound {

View file

@ -30,12 +30,10 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
}, },
"backup_schedule": schema.StringAttribute{ "backup_schedule": schema.StringAttribute{
Required: true, Required: true,
Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.", Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.", MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
}, },
"connection_info": schema.SingleNestedAttribute{ "connection_info": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"write": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"host": schema.StringAttribute{ "host": schema.StringAttribute{
Computed: true, Computed: true,
@ -48,24 +46,14 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
MarkdownDescription: "The port of the instance.", MarkdownDescription: "The port of the instance.",
}, },
}, },
CustomType: WriteType{
ObjectType: types.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
},
Computed: true,
Description: "The DNS name and port in the instance overview",
MarkdownDescription: "The DNS name and port in the instance overview",
},
},
CustomType: ConnectionInfoType{ CustomType: ConnectionInfoType{
ObjectType: types.ObjectType{ ObjectType: types.ObjectType{
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx), AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
}, },
}, },
Computed: true, Computed: true,
Description: "The connection information of the instance", Description: "The DNS name and port in the instance overview",
MarkdownDescription: "The connection information of the instance", MarkdownDescription: "The DNS name and port in the instance overview",
}, },
"encryption": schema.SingleNestedAttribute{ "encryption": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
@ -275,22 +263,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
attributes := in.Attributes() attributes := in.Attributes()
writeAttribute, ok := attributes["write"] hostAttribute, ok := attributes["host"]
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Missing", "Attribute Missing",
`write is missing from object`) `host is missing from object`)
return nil, diags return nil, diags
} }
writeVal, ok := writeAttribute.(basetypes.ObjectValue) hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -298,7 +304,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
} }
return ConnectionInfoValue{ return ConnectionInfoValue{
Write: writeVal, Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown, state: attr.ValueStateKnown,
}, diags }, diags
} }
@ -366,22 +373,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
return NewConnectionInfoValueUnknown(), diags return NewConnectionInfoValueUnknown(), diags
} }
writeAttribute, ok := attributes["write"] hostAttribute, ok := attributes["host"]
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Missing", "Attribute Missing",
`write is missing from object`) `host is missing from object`)
return NewConnectionInfoValueUnknown(), diags return NewConnectionInfoValueUnknown(), diags
} }
writeVal, ok := writeAttribute.(basetypes.ObjectValue) hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return NewConnectionInfoValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -389,7 +414,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
} }
return ConnectionInfoValue{ return ConnectionInfoValue{
Write: writeVal, Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown, state: attr.ValueStateKnown,
}, diags }, diags
} }
@ -462,401 +488,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = ConnectionInfoValue{} var _ basetypes.ObjectValuable = ConnectionInfoValue{}
type ConnectionInfoValue struct { type ConnectionInfoValue struct {
Write basetypes.ObjectValue `tfsdk:"write"`
state attr.ValueState
}
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 1)
var val tftypes.Value
var err error
attrTypes["write"] = basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
switch v.state {
case attr.ValueStateKnown:
vals := make(map[string]tftypes.Value, 1)
val, err = v.Write.ToTerraformValue(ctx)
if err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
vals["write"] = val
if err := tftypes.ValidateValue(objectType, vals); err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
return tftypes.NewValue(objectType, vals), nil
case attr.ValueStateNull:
return tftypes.NewValue(objectType, nil), nil
case attr.ValueStateUnknown:
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
default:
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
}
}
func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull
}
func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown
}
func (v ConnectionInfoValue) String() string {
return "ConnectionInfoValue"
}
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics
var write basetypes.ObjectValue
if v.Write.IsNull() {
write = types.ObjectNull(
WriteValue{}.AttributeTypes(ctx),
)
}
if v.Write.IsUnknown() {
write = types.ObjectUnknown(
WriteValue{}.AttributeTypes(ctx),
)
}
if !v.Write.IsNull() && !v.Write.IsUnknown() {
write = types.ObjectValueMust(
WriteValue{}.AttributeTypes(ctx),
v.Write.Attributes(),
)
}
attributeTypes := map[string]attr.Type{
"write": basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
}
if v.IsNull() {
return types.ObjectNull(attributeTypes), diags
}
if v.IsUnknown() {
return types.ObjectUnknown(attributeTypes), diags
}
objVal, diags := types.ObjectValue(
attributeTypes,
map[string]attr.Value{
"write": write,
})
return objVal, diags
}
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
other, ok := o.(ConnectionInfoValue)
if !ok {
return false
}
if v.state != other.state {
return false
}
if v.state != attr.ValueStateKnown {
return true
}
if !v.Write.Equal(other.Write) {
return false
}
return true
}
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
return ConnectionInfoType{
basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx),
},
}
}
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"write": basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
}
}
var _ basetypes.ObjectTypable = WriteType{}
type WriteType struct {
basetypes.ObjectType
}
func (t WriteType) Equal(o attr.Type) bool {
other, ok := o.(WriteType)
if !ok {
return false
}
return t.ObjectType.Equal(other.ObjectType)
}
func (t WriteType) String() string {
return "WriteType"
}
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
var diags diag.Diagnostics
attributes := in.Attributes()
hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
`host is missing from object`)
return nil, diags
}
hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
return nil, diags
}
return WriteValue{
Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
func NewWriteValueNull() WriteValue {
return WriteValue{
state: attr.ValueStateNull,
}
}
func NewWriteValueUnknown() WriteValue {
return WriteValue{
state: attr.ValueStateUnknown,
}
}
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
var diags diag.Diagnostics
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
ctx := context.Background()
for name, attributeType := range attributeTypes {
attribute, ok := attributes[name]
if !ok {
diags.AddError(
"Missing WriteValue Attribute Value",
"While creating a WriteValue value, a missing attribute value was detected. "+
"A WriteValue must contain values for all attributes, even if null or unknown. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
)
continue
}
if !attributeType.Equal(attribute.Type(ctx)) {
diags.AddError(
"Invalid WriteValue Attribute Type",
"While creating a WriteValue value, an invalid attribute value was detected. "+
"A WriteValue must use a matching attribute type for the value. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
)
}
}
for name := range attributes {
_, ok := attributeTypes[name]
if !ok {
diags.AddError(
"Extra WriteValue Attribute Value",
"While creating a WriteValue value, an extra attribute value was detected. "+
"A WriteValue must not contain values beyond the expected attribute types. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
)
}
}
if diags.HasError() {
return NewWriteValueUnknown(), diags
}
hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
`host is missing from object`)
return NewWriteValueUnknown(), diags
}
hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
return NewWriteValueUnknown(), diags
}
return WriteValue{
Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
// NewWriteValueMust builds a WriteValue from the given attribute types and
// attribute values, panicking instead of returning diagnostics on failure.
//
// Intended for provider-internal construction paths where the inputs are
// already known to be valid; prefer NewWriteValue when errors should be
// surfaced as diagnostics instead.
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
	value, diags := NewWriteValue(attributeTypes, attributes)
	if !diags.HasError() {
		return value
	}
	// Render each diagnostic as "SEVERITY | SUMMARY | DETAIL" so the panic
	// message carries the full failure context. (This formatting could
	// potentially be added to the diag package.)
	messages := make([]string, 0, len(diags))
	for _, d := range diags {
		messages = append(messages, fmt.Sprintf("%s | %s | %s", d.Severity(), d.Summary(), d.Detail()))
	}
	panic("NewWriteValueMust received error(s): " + strings.Join(messages, "\n"))
}
// ValueFromTerraform converts a tftypes.Value received over the wire into a
// WriteValue, after validating the incoming type against this object type.
//
// It returns a null value for a nil type or null input, an unknown value for
// an unknown input, and an error when the wire type does not match or an
// individual attribute fails to convert.
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
	switch {
	case in.Type() == nil:
		return NewWriteValueNull(), nil
	case !in.Type().Equal(t.TerraformType(ctx)):
		return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
	case !in.IsKnown():
		return NewWriteValueUnknown(), nil
	case in.IsNull():
		return NewWriteValueNull(), nil
	}
	// Decode the raw object into its per-attribute Terraform values.
	raw := map[string]tftypes.Value{}
	if err := in.As(&raw); err != nil {
		return nil, err
	}
	// Convert each raw value through its declared framework attribute type.
	converted := make(map[string]attr.Value, len(raw))
	for name, rawValue := range raw {
		attrValue, err := t.AttrTypes[name].ValueFromTerraform(ctx, rawValue)
		if err != nil {
			return nil, err
		}
		converted[name] = attrValue
	}
	return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), converted), nil
}
// ValueType returns the attr.Value implementation associated with WriteType.
// The framework uses the returned zero value for type information only.
func (t WriteType) ValueType(ctx context.Context) attr.Value {
	return WriteValue{}
}

// Compile-time assertion that WriteValue satisfies basetypes.ObjectValuable.
var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"` Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"` Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState state attr.ValueState
} }
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 2) attrTypes := make(map[string]tftypes.Type, 2)
var val tftypes.Value var val tftypes.Value
@ -901,19 +538,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
} }
} }
func (v WriteValue) IsNull() bool { func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull return v.state == attr.ValueStateNull
} }
func (v WriteValue) IsUnknown() bool { func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown return v.state == attr.ValueStateUnknown
} }
func (v WriteValue) String() string { func (v ConnectionInfoValue) String() string {
return "WriteValue" return "ConnectionInfoValue"
} }
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
@ -939,8 +576,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
return objVal, diags return objVal, diags
} }
func (v WriteValue) Equal(o attr.Value) bool { func (v ConnectionInfoValue) Equal(o attr.Value) bool {
other, ok := o.(WriteValue) other, ok := o.(ConnectionInfoValue)
if !ok { if !ok {
return false return false
@ -965,15 +602,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
return true return true
} }
func (v WriteValue) Type(ctx context.Context) attr.Type { func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
return WriteType{ return ConnectionInfoType{
basetypes.ObjectType{ basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx), AttrTypes: v.AttributeTypes(ctx),
}, },
} }
} }
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"host": basetypes.StringType{}, "host": basetypes.StringType{},
"port": basetypes.Int64Type{}, "port": basetypes.Int64Type{},

View file

@ -5,23 +5,17 @@ import (
_ "embed" _ "embed"
"fmt" "fmt"
"log" "log"
"math"
"os" "os"
"strconv" "strconv"
"strings" "strings"
"testing" "testing"
"time"
"github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/acctest"
"github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/terraform"
"github.com/stackitcloud/stackit-sdk-go/core/config" "github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
// The fwresource import alias is so there is no collision // The fwresource import alias is so there is no collision
@ -32,15 +26,54 @@ import (
const pfx = "stackitprivatepreview_postgresflexalpha" const pfx = "stackitprivatepreview_postgresflexalpha"
var testInstances []string
func init() {
sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper")
resource.AddTestSweepers(sweeperName, &resource.Sweeper{
Name: sweeperName,
F: func(region string) error {
ctx := context.Background()
apiClientConfigOptions := []config.ConfigurationOption{}
apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...)
if err != nil {
log.Fatalln(err)
}
instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region).
Size(100).
Execute()
if err != nil {
log.Fatalln(err)
}
for _, inst := range instances.GetInstances() {
if strings.HasPrefix(inst.GetName(), "tf-acc-") {
for _, item := range testInstances {
if inst.GetName() == item {
delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId())
if delErr != nil {
// TODO: maybe just warn?
log.Fatalln(delErr)
}
}
}
}
}
return nil
},
})
}
func TestInstanceResourceSchema(t *testing.T) { func TestInstanceResourceSchema(t *testing.T) {
// t.Parallel() t.Parallel()
ctx := context.Background() ctx := context.Background()
schemaRequest := fwresource.SchemaRequest{} schemaRequest := fwresource.SchemaRequest{}
schemaResponse := &fwresource.SchemaResponse{} schemaResponse := &fwresource.SchemaResponse{}
// Instantiate the resource.Resource and call its Schema method // Instantiate the resource.Resource and call its Schema method
postgresflexalphaInstance.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse) postgresflexalpha.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
if schemaResponse.Diagnostics.HasError() { if schemaResponse.Diagnostics.HasError() {
t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics) t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
@ -54,6 +87,14 @@ func TestInstanceResourceSchema(t *testing.T) {
} }
} }
var (
//go:embed testdata/resource-no-enc.tf
resourceConfigNoEnc string //nolint:unused // needs implementation
//go:embed testdata/resource-enc.tf
resourceConfigEnc string //nolint:unused // needs implementation
)
func TestMain(m *testing.M) { func TestMain(m *testing.M) {
testutils.Setup() testutils.Setup()
code := m.Run() code := m.Run()
@ -67,23 +108,44 @@ func testAccPreCheck(t *testing.T) {
} }
} }
// func TestAccResourceExample_parallel(t *testing.T) {
// t.Parallel()
//
// exData := resData{
// Region: "eu01",
// ServiceAccountFilePath: sa_file,
// ProjectID: project_id,
// Name: acctest.RandomWithPrefix("tf-acc"),
// }
//
// resource.Test(t, resource.TestCase{
// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
// Steps: []resource.TestStep{
// {
// Config: testAccResourceEncryptionExampleConfig(exData),
// Check: resource.TestCheckResourceAttrSet("example_resource.test", "id"),
// },
// },
// })
//}
type resData struct { type resData struct {
ServiceAccountFilePath string ServiceAccountFilePath string
ProjectID string ProjectId string
Region string Region string
Name string Name string
TfName string TfName string
FlavorID string FlavorId string
BackupSchedule string BackupSchedule string
UseEncryption bool UseEncryption bool
KekKeyID string KekKeyId string
KekKeyRingID string KekKeyRingId string
KekKeyVersion uint8 KekKeyVersion uint8
KekServiceAccount string KekServiceAccount string
PerformanceClass string PerformanceClass string
Replicas uint32 Replicas uint32
Size uint32 Size uint32
ACLString string AclString string
AccessScope string AccessScope string
RetentionDays uint32 RetentionDays uint32
Version string Version string
@ -93,13 +155,13 @@ type resData struct {
type User struct { type User struct {
Name string Name string
ProjectID string ProjectId string
Roles []string Roles []string
} }
type Database struct { type Database struct {
Name string Name string
ProjectID string ProjectId string
Owner string Owner string
} }
@ -108,17 +170,17 @@ func getExample() resData {
return resData{ return resData{
Region: os.Getenv("TF_ACC_REGION"), Region: os.Getenv("TF_ACC_REGION"),
ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"), ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"), ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
Name: name, Name: name,
TfName: name, TfName: name,
FlavorID: "2.4", FlavorId: "2.4",
BackupSchedule: "0 0 * * *", BackupSchedule: "0 0 * * *",
UseEncryption: false, UseEncryption: false,
RetentionDays: 33, RetentionDays: 33,
Replicas: 1, Replicas: 1,
PerformanceClass: "premium-perf2-stackit", PerformanceClass: "premium-perf2-stackit",
Size: 10, Size: 10,
ACLString: "0.0.0.0/0", AclString: "0.0.0.0/0",
AccessScope: "PUBLIC", AccessScope: "PUBLIC",
Version: "17", Version: "17",
} }
@ -133,103 +195,23 @@ func TestAccInstance(t *testing.T) {
updSizeData := exData updSizeData := exData
updSizeData.Size = 25 updSizeData.Size = 25
updBackupSched := updSizeData resource.ParallelTest(t, resource.TestCase{
// api should complain about more than one daily backup
updBackupSched.BackupSchedule = "30 3 * * *"
/*
{
"backupSchedule": "6 6 * * *",
"flavorId": "1.2",
"name": "postgres-instance",
"network": {
"acl": [
"198.51.100.0/24"
]
},
"replicas": 1,
"retentionDays": 35,
"storage": {
"size": 10
},
"version": "string"
}
*/
testItemID := testutils.ResStr(pfx, "instance", exData.TfName)
resource.ParallelTest(
t, resource.TestCase{
PreCheck: func() { PreCheck: func() {
testAccPreCheck(t) testAccPreCheck(t)
t.Logf(" ... working on instance %s", exData.TfName) t.Logf(" ... working on instance %s", exData.TfName)
testInstances = append(testInstances, exData.TfName)
}, },
CheckDestroy: testAccCheckPostgresFlexDestroy,
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{ Steps: []resource.TestStep{
// Create and verify // Create and verify
{ {
//PreConfig: func() {
// //
// },
Config: testutils.StringFromTemplateMust( Config: testutils.StringFromTemplateMust(
"testdata/instance_template.gompl", "testdata/instance_template.gompl",
exData, exData,
), ),
Check: resource.ComposeAggregateTestCheckFunc( Check: resource.ComposeAggregateTestCheckFunc(
// check params acl count resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", exData.Name),
resource.TestCheckResourceAttr(testItemID, "acl.#", "1"), resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"),
// check params are set
resource.TestCheckResourceAttrSet(testItemID, "backup_schedule"),
//// connection_info should contain 1 sub entry
// resource.TestCheckResourceAttr(testItemID, "connection_info.%", "1"),
//
//// connection_info.write should contain 2 sub entries
// resource.TestCheckResourceAttr(testItemID, "connection_info.write", "2"),
//
// resource.TestCheckResourceAttrSet(testItemID, "connection_info.write.host"),
// resource.TestCheckResourceAttrSet(testItemID, "connection_info.write.port"),
resource.TestCheckResourceAttrSet(testItemID, "flavor_id"),
resource.TestCheckResourceAttrSet(testItemID, "id"),
resource.TestCheckResourceAttrSet(testItemID, "instance_id"),
resource.TestCheckResourceAttrSet(testItemID, "is_deletable"),
resource.TestCheckResourceAttrSet(testItemID, "name"),
// network should contain 4 sub entries
resource.TestCheckResourceAttr(testItemID, "network.%", "4"),
resource.TestCheckResourceAttrSet(testItemID, "network.access_scope"),
// on unencrypted instances we expect this to be empty
resource.TestCheckResourceAttr(testItemID, "network.instance_address", ""),
resource.TestCheckResourceAttr(testItemID, "network.router_address", ""),
// only one acl entry should be set
resource.TestCheckResourceAttr(testItemID, "network.acl.#", "1"),
resource.TestCheckResourceAttrSet(testItemID, "replicas"),
resource.TestCheckResourceAttrSet(testItemID, "retention_days"),
resource.TestCheckResourceAttrSet(testItemID, "status"),
// storage should contain 2 sub entries
resource.TestCheckResourceAttr(testItemID, "storage.%", "2"),
resource.TestCheckResourceAttrSet(testItemID, "storage.performance_class"),
resource.TestCheckResourceAttrSet(testItemID, "storage.size"),
resource.TestCheckResourceAttrSet(testItemID, "version"),
// check absent attr
resource.TestCheckNoResourceAttr(testItemID, "encryption"),
resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_id"),
resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_ring_id"),
resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_version"),
resource.TestCheckNoResourceAttr(testItemID, "encryption.service_account"),
// check param values
resource.TestCheckResourceAttr(testItemID, "name", exData.Name),
), ),
}, },
// Update name and verify // Update name and verify
@ -239,11 +221,7 @@ func TestAccInstance(t *testing.T) {
updNameData, updNameData,
), ),
Check: resource.ComposeTestCheckFunc( Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr( resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", updNameData.Name),
testutils.ResStr(pfx, "instance", exData.TfName),
"name",
updNameData.Name,
),
), ),
}, },
// Update size and verify // Update size and verify
@ -260,20 +238,6 @@ func TestAccInstance(t *testing.T) {
), ),
), ),
}, },
// Update backup schedule
{
Config: testutils.StringFromTemplateMust(
"testdata/instance_template.gompl",
updBackupSched,
),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
testutils.ResStr(pfx, "instance", exData.TfName),
"backup_schedule",
updBackupSched.BackupSchedule,
),
),
},
//// Import test //// Import test
//{ //{
// ResourceName: "example_resource.test", // ResourceName: "example_resource.test",
@ -281,8 +245,7 @@ func TestAccInstance(t *testing.T) {
// ImportStateVerify: true, // ImportStateVerify: true,
// }, // },
}, },
}, })
)
} }
func TestAccInstanceWithUsers(t *testing.T) { func TestAccInstanceWithUsers(t *testing.T) {
@ -292,18 +255,17 @@ func TestAccInstanceWithUsers(t *testing.T) {
data.Users = []User{ data.Users = []User{
{ {
Name: userName, Name: userName,
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"), ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
Roles: []string{"login"}, Roles: []string{"login"},
}, },
} }
resource.ParallelTest( resource.ParallelTest(t, resource.TestCase{
t, resource.TestCase{
PreCheck: func() { PreCheck: func() {
testAccPreCheck(t) testAccPreCheck(t)
t.Logf(" ... working on instance %s", data.TfName) t.Logf(" ... working on instance %s", data.TfName)
testInstances = append(testInstances, data.TfName)
}, },
CheckDestroy: testAccCheckPostgresFlexDestroy,
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{ Steps: []resource.TestStep{
// Create and verify // Create and verify
@ -313,19 +275,14 @@ func TestAccInstanceWithUsers(t *testing.T) {
data, data,
), ),
Check: resource.ComposeAggregateTestCheckFunc( Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr( resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name),
testutils.ResStr(pfx, "instance", data.TfName),
"name",
data.Name,
),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
), ),
}, },
}, },
}, })
)
} }
func TestAccInstanceWithDatabases(t *testing.T) { func TestAccInstanceWithDatabases(t *testing.T) {
@ -336,7 +293,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
data.Users = []User{ data.Users = []User{
{ {
Name: userName, Name: userName,
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"), ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
Roles: []string{"login"}, Roles: []string{"login"},
}, },
} }
@ -344,18 +301,17 @@ func TestAccInstanceWithDatabases(t *testing.T) {
data.Databases = []Database{ data.Databases = []Database{
{ {
Name: dbName, Name: dbName,
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"), ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
Owner: userName, Owner: userName,
}, },
} }
resource.ParallelTest( resource.ParallelTest(t, resource.TestCase{
t, resource.TestCase{
PreCheck: func() { PreCheck: func() {
testAccPreCheck(t) testAccPreCheck(t)
t.Logf(" ... working on instance %s", data.TfName) t.Logf(" ... working on instance %s", data.TfName)
testInstances = append(testInstances, data.TfName)
}, },
CheckDestroy: testAccCheckPostgresFlexDestroy,
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories, ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{ Steps: []resource.TestStep{
// Create and verify // Create and verify
@ -365,11 +321,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
data, data,
), ),
Check: resource.ComposeAggregateTestCheckFunc( Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr( resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name),
testutils.ResStr(pfx, "instance", data.TfName),
"name",
data.Name,
),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"), resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName), resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"), resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
@ -379,95 +331,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
), ),
}, },
}, },
}, })
)
}
func TestAccEncryptedInstanceWithDatabases(t *testing.T) {
encKekKeyID, ok := os.LookupEnv("TF_ACC_KEK_KEY_ID")
if !ok || encKekKeyID == "" {
t.Skip("env var TF_ACC_KEK_KEY_ID needed for encryption test")
}
encKekKeyRingID, ok := os.LookupEnv("TF_ACC_KEK_KEY_RING_ID")
if !ok || encKekKeyRingID == "" {
t.Skip("env var TF_ACC_KEK_KEY_RING_ID needed for encryption test")
}
encKekKeyVersion, ok := os.LookupEnv("TF_ACC_KEK_KEY_VERSION")
if !ok || encKekKeyVersion == "" {
t.Skip("env var TF_ACC_KEK_KEY_VERSION needed for encryption test")
}
encSvcAcc, ok := os.LookupEnv("TF_ACC_KEK_SERVICE_ACCOUNT")
if !ok || encSvcAcc == "" {
t.Skip("env var TF_ACC_KEK_SERVICE_ACCOUNT needed for encryption test")
}
data := getExample()
data.UseEncryption = true
data.KekKeyID = encKekKeyID
data.KekKeyRingID = encKekKeyRingID
data.KekServiceAccount = encSvcAcc
encKekKeyVersionInt, err := strconv.Atoi(encKekKeyVersion)
if err != nil {
t.Errorf("error converting string to int")
}
if encKekKeyVersionInt > math.MaxUint8 {
t.Errorf("value too large to convert to uint8")
}
data.KekKeyVersion = uint8(encKekKeyVersionInt) //nolint:gosec // handled above
dbName := "testdb"
userName := "testUser"
data.Users = []User{
{
Name: userName,
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
Roles: []string{"login"},
},
}
data.Databases = []Database{
{
Name: dbName,
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
Owner: userName,
},
}
resource.ParallelTest(
t, resource.TestCase{
PreCheck: func() {
testAccPreCheck(t)
t.Logf(" ... working on instance %s", data.TfName)
},
CheckDestroy: testAccCheckPostgresFlexDestroy,
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
// Create and verify
{
Config: testutils.StringFromTemplateMust(
"testdata/instance_template.gompl",
data,
),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(
testutils.ResStr(pfx, "instance", data.TfName),
"name",
data.Name,
),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName),
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName),
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"),
),
},
},
},
)
} }
// func setupMockServer() *httptest.Server { // func setupMockServer() *httptest.Server {
@ -509,6 +373,19 @@ func TestAccEncryptedInstanceWithDatabases(t *testing.T) {
// // Run unit tests against mock // // Run unit tests against mock
//} //}
// type postgresFlexClientMocked struct {
// returnError bool
// getFlavorsResp *postgresflex.GetFlavorsResponse
// }
//
// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) {
// if c.returnError {
// return nil, fmt.Errorf("get flavors failed")
// }
//
// return c.getFlavorsResp, nil
// }
// func TestNewInstanceResource(t *testing.T) { // func TestNewInstanceResource(t *testing.T) {
// exData := resData{ // exData := resData{
// Region: "eu01", // Region: "eu01",
@ -1122,87 +999,3 @@ func TestAccEncryptedInstanceWithDatabases(t *testing.T) {
// } // }
// return nil // return nil
//} //}
func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
testutils.Setup()
pID, ok := os.LookupEnv("TF_ACC_PROJECT_ID")
if !ok {
log.Fatalln("unable to read TF_ACC_PROJECT_ID")
}
ctx := context.Background()
var client *v3alpha1api.APIClient
var err error
var region, projectID string
region = testutils.Region
if region == "" {
region = "eu01"
}
projectID = pID
if projectID == "" {
return fmt.Errorf("projectID could not be determined in destroy function")
}
apiClientConfigOptions := []config.ConfigurationOption{
config.WithServiceAccountKeyPath(os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")),
config.WithRegion(region),
}
if testutils.PostgresFlexCustomEndpoint != "" {
apiClientConfigOptions = append(
apiClientConfigOptions,
config.WithEndpoint(testutils.PostgresFlexCustomEndpoint),
)
}
client, err = v3alpha1api.NewAPIClient(apiClientConfigOptions...)
if err != nil {
log.Fatalln(err)
}
instancesToDestroy := []string{}
for _, rs := range s.RootModule().Resources {
if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" &&
rs.Type != "stackitprivatepreview_postgresflexbeta_instance" {
continue
}
// instance terraform ID: = "[project_id],[region],[instance_id]"
instanceID := strings.Split(rs.Primary.ID, core.Separator)[2]
instancesToDestroy = append(instancesToDestroy, instanceID)
}
instancesResp, err := client.DefaultAPI.ListInstancesRequest(ctx, projectID, region).
Size(100).
Execute()
if err != nil {
return fmt.Errorf("getting instancesResp: %w", err)
}
items := instancesResp.GetInstances()
for i := range items {
if items[i].Id == "" {
continue
}
if utils.Contains(instancesToDestroy, items[i].Id) {
err := client.DefaultAPI.DeleteInstanceRequest(ctx, testutils.ProjectId, region, items[i].Id).Execute()
if err != nil {
return fmt.Errorf("deleting instance %s during CheckDestroy: %w", items[i].Id, err)
}
err = postgresflexalpha.DeleteInstanceWaitHandler(
ctx,
client.DefaultAPI,
testutils.ProjectId,
testutils.Region,
items[i].Id,
15*time.Minute,
10*time.Second,
)
if err != nil {
return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", items[i].Id, err)
}
}
}
return nil
}

View file

@ -4,11 +4,11 @@ provider "stackitprivatepreview" {
} }
resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" { resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
project_id = "{{ .ProjectID }}" project_id = "{{ .ProjectId }}"
name = "{{ .Name }}" name = "{{ .Name }}"
backup_schedule = "{{ .BackupSchedule }}" backup_schedule = "{{ .BackupSchedule }}"
retention_days = {{ .RetentionDays }} retention_days = {{ .RetentionDays }}
flavor_id = "{{ .FlavorID }}" flavor_id = "{{ .FlavorId }}"
replicas = {{ .Replicas }} replicas = {{ .Replicas }}
storage = { storage = {
performance_class = "{{ .PerformanceClass }}" performance_class = "{{ .PerformanceClass }}"
@ -16,14 +16,14 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
} }
{{ if .UseEncryption }} {{ if .UseEncryption }}
encryption = { encryption = {
kek_key_id = "{{ .KekKeyID }}" kek_key_id = {{ .KekKeyId }}
kek_key_ring_id = "{{ .KekKeyRingID }}" kek_key_ring_id = {{ .KekKeyRingId }}
kek_key_version = {{ .KekKeyVersion }} kek_key_version = {{ .KekKeyVersion }}
service_account = "{{ .KekServiceAccount }}" service_account = "{{ .KekServiceAccount }}"
} }
{{ end }} {{ end }}
network = { network = {
acl = ["{{ .ACLString }}"] acl = ["{{ .AclString }}"]
access_scope = "{{ .AccessScope }}" access_scope = "{{ .AccessScope }}"
} }
version = {{ .Version }} version = {{ .Version }}
@ -33,7 +33,7 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
{{ $tfName := .TfName }} {{ $tfName := .TfName }}
{{ range $user := .Users }} {{ range $user := .Users }}
resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" { resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
project_id = "{{ $user.ProjectID }}" project_id = "{{ $user.ProjectId }}"
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
name = "{{ $user.Name }}" name = "{{ $user.Name }}"
roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}] roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
@ -45,7 +45,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
{{ $tfName := .TfName }} {{ $tfName := .TfName }}
{{ range $db := .Databases }} {{ range $db := .Databases }}
resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" { resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
project_id = "{{ $db.ProjectID }}" project_id = "{{ $db.ProjectId }}"
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
name = "{{ $db.Name }}" name = "{{ $db.Name }}"
owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name

View file

@ -0,0 +1,27 @@
variable "project_id" {}
variable "kek_key_id" {}
variable "kek_key_ring_id" {}
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
project_id = var.project_id
name = "example-instance"
backup_schedule = "0 0 * * *"
retention_days = 30
flavor_id = "2.4"
replicas = 1
storage = {
performance_class = "premium-perf2-stackit"
size = 10
}
encryption = {
kek_key_id = var.kek_key_id
kek_key_ring_id = var.kek_key_ring_id
kek_key_version = 1
service_account = "service@account.email"
}
network = {
acl = ["0.0.0.0/0"]
access_scope = "PUBLIC"
}
version = 17
}

View file

@ -0,0 +1,19 @@
variable "project_id" {}
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
project_id = var.project_id
name = "example-instance"
backup_schedule = "0 0 * * *"
retention_days = 30
flavor_id = "2.4"
replicas = 1
storage = {
performance_class = "premium-perf2-stackit"
size = 10
}
network = {
acl = ["0.0.0.0/0"]
access_scope = "PUBLIC"
}
version = 17
}

View file

@ -8,8 +8,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -40,7 +40,7 @@ type dataSourceModel struct {
// userDataSource is the data source implementation. // userDataSource is the data source implementation.
type userDataSource struct { type userDataSource struct {
client *v3alpha1api.APIClient client *postgresflex.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -101,24 +101,24 @@ func (r *userDataSource) Read(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectID := model.ProjectId.ValueString() projectId := model.ProjectId.ValueString()
instanceID := model.InstanceId.ValueString() instanceId := model.InstanceId.ValueString()
userID64 := model.UserId.ValueInt64() userId64 := model.UserId.ValueInt64()
if userID64 > math.MaxInt32 { if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)") core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return return
} }
userID := int32(userID64) // nolint:gosec // check is performed above userId := int32(userId64) // nolint:gosec // check is performed above
region := r.providerData.GetRegionWithOverride(model.Region) region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectID) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceID) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "user_id", userID) ctx = tflog.SetField(ctx, "user_id", userId)
recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectID, region, instanceID, userID).Execute() recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil { if err != nil {
handleReadError(ctx, &diags, err, projectID, instanceID, userID) handleReadError(ctx, &diags, err, projectId, instanceId, userId)
resp.State.RemoveResource(ctx) resp.State.RemoveResource(ctx)
return return
} }
@ -151,8 +151,8 @@ func handleReadError(
ctx context.Context, ctx context.Context,
diags *diag.Diagnostics, diags *diag.Diagnostics,
err error, err error,
projectID, instanceID string, projectId, instanceId string,
userID int32, userId int32,
) { ) {
utils.LogError( utils.LogError(
ctx, ctx,
@ -161,23 +161,23 @@ func handleReadError(
"Reading user", "Reading user",
fmt.Sprintf( fmt.Sprintf(
"User with ID %q or instance with ID %q does not exist in project %q.", "User with ID %q or instance with ID %q does not exist in project %q.",
userID, userId,
instanceID, instanceId,
projectID, projectId,
), ),
map[int]string{ map[int]string{
http.StatusBadRequest: fmt.Sprintf( http.StatusBadRequest: fmt.Sprintf(
"Invalid user request parameters for project %q and instance %q.", "Invalid user request parameters for project %q and instance %q.",
projectID, projectId,
instanceID, instanceId,
), ),
http.StatusNotFound: fmt.Sprintf( http.StatusNotFound: fmt.Sprintf(
"User, instance %q, or project %q or user %q not found.", "User, instance %q, or project %q or user %q not found.",
instanceID, instanceId,
projectID, projectId,
userID, userId,
), ),
http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectID), http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
}, },
) )
} }

View file

@ -6,14 +6,14 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
) )
// mapDataSourceFields maps API response to data source model, preserving existing ID. // mapDataSourceFields maps API response to data source model, preserving existing ID.
func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourceModel, region string) error { func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSourceModel, region string) error {
if userResp == nil { if userResp == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
@ -22,24 +22,27 @@ func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourc
} }
user := userResp user := userResp
var userID int64 var userId int64
if model.UserId.ValueInt64() == 0 { if model.UserId.ValueInt64() != 0 {
userId = model.UserId.ValueInt64()
} else if user.Id != nil {
userId = *user.Id
} else {
return fmt.Errorf("user id not present") return fmt.Errorf("user id not present")
} }
userID = model.UserId.ValueInt64()
model.TerraformID = utils.BuildInternalTerraformId( model.TerraformID = utils.BuildInternalTerraformId(
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userID, 10), model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
) )
model.UserId = types.Int64Value(userID) model.UserId = types.Int64Value(userId)
model.Name = types.StringValue(user.GetName()) model.Name = types.StringValue(user.GetName())
if user.Roles == nil { if user.Roles == nil {
model.Roles = types.List(types.SetNull(types.StringType)) model.Roles = types.List(types.SetNull(types.StringType))
} else { } else {
var roles []attr.Value var roles []attr.Value
for _, role := range user.Roles { for _, role := range *user.Roles {
roles = append(roles, types.StringValue(string(role))) roles = append(roles, types.StringValue(string(role)))
} }
rolesSet, diags := types.SetValue(types.StringType, roles) rolesSet, diags := types.SetValue(types.StringType, roles)
@ -49,24 +52,24 @@ func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourc
model.Roles = types.List(rolesSet) model.Roles = types.List(rolesSet)
} }
model.Id = types.Int64Value(userID) model.Id = types.Int64Value(userId)
model.Region = types.StringValue(region) model.Region = types.StringValue(region)
model.Status = types.StringValue(user.GetStatus()) model.Status = types.StringValue(user.GetStatus())
return nil return nil
} }
// toPayloadRoles converts a string slice to the API's role type. // toPayloadRoles converts a string slice to the API's role type.
func toPayloadRoles(roles []string) []v3alpha1api.UserRole { func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
var userRoles = make([]v3alpha1api.UserRole, 0, len(roles)) var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
for _, role := range roles { for _, role := range *roles {
userRoles = append(userRoles, v3alpha1api.UserRole(role)) userRoles = append(userRoles, postgresflex.UserRole(role))
} }
return userRoles return &userRoles
} }
// toUpdatePayload creates an API update payload from the resource model. // toUpdatePayload creates an API update payload from the resource model.
func toUpdatePayload(model *resourceModel, roles []string) ( func toUpdatePayload(model *resourceModel, roles *[]string) (
*v3alpha1api.UpdateUserRequestPayload, *postgresflex.UpdateUserRequestPayload,
error, error,
) { ) {
if model == nil { if model == nil {
@ -76,14 +79,14 @@ func toUpdatePayload(model *resourceModel, roles []string) (
return nil, fmt.Errorf("nil roles") return nil, fmt.Errorf("nil roles")
} }
return &v3alpha1api.UpdateUserRequestPayload{ return &postgresflex.UpdateUserRequestPayload{
Name: model.Name.ValueStringPointer(), Name: model.Name.ValueStringPointer(),
Roles: toPayloadRoles(roles), Roles: toPayloadRoles(roles),
}, nil }, nil
} }
// toCreatePayload creates an API create payload from the resource model. // toCreatePayload creates an API create payload from the resource model.
func toCreatePayload(model *resourceModel, roles []string) (*v3alpha1api.CreateUserRequestPayload, error) { func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
if model == nil { if model == nil {
return nil, fmt.Errorf("nil model") return nil, fmt.Errorf("nil model")
} }
@ -91,14 +94,14 @@ func toCreatePayload(model *resourceModel, roles []string) (*v3alpha1api.CreateU
return nil, fmt.Errorf("nil roles") return nil, fmt.Errorf("nil roles")
} }
return &v3alpha1api.CreateUserRequestPayload{ return &postgresflex.CreateUserRequestPayload{
Roles: toPayloadRoles(roles), Roles: toPayloadRoles(roles),
Name: model.Name.ValueString(), Name: model.Name.ValueStringPointer(),
}, nil }, nil
} }
// mapResourceFields maps API response to the resource model, preserving existing ID. // mapResourceFields maps API response to the resource model, preserving existing ID.
func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceModel, region string) error { func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error {
if userResp == nil { if userResp == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
@ -107,24 +110,24 @@ func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceMod
} }
user := userResp user := userResp
var userID int64 var userId int64
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 { if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
userID = model.UserId.ValueInt64() userId = model.UserId.ValueInt64()
} else if user.Id != 0 { } else if user.Id != nil {
userID = int64(user.Id) userId = *user.Id
} else { } else {
return fmt.Errorf("user id not present") return fmt.Errorf("user id not present")
} }
model.Id = types.Int64Value(userID) model.Id = types.Int64Value(userId)
model.UserId = types.Int64Value(userID) model.UserId = types.Int64Value(userId)
model.Name = types.StringValue(user.Name) model.Name = types.StringPointerValue(user.Name)
if user.Roles == nil { if user.Roles == nil {
model.Roles = types.List(types.SetNull(types.StringType)) model.Roles = types.List(types.SetNull(types.StringType))
} else { } else {
var roles []attr.Value var roles []attr.Value
for _, role := range user.Roles { for _, role := range *user.Roles {
roles = append(roles, types.StringValue(string(role))) roles = append(roles, types.StringValue(string(role)))
} }
rolesSet, diags := types.SetValue(types.StringType, roles) rolesSet, diags := types.SetValue(types.StringType, roles)
@ -134,6 +137,6 @@ func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceMod
model.Roles = types.List(rolesSet) model.Roles = types.List(rolesSet)
} }
model.Region = types.StringValue(region) model.Region = types.StringValue(region)
model.Status = types.StringValue(user.Status) model.Status = types.StringPointerValue(user.Status)
return nil return nil
} }

View file

@ -8,8 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils" "github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen" data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
) )
@ -44,12 +43,12 @@ func TestMapDataSourceFields(t *testing.T) {
{ {
"simple_values", "simple_values",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Roles: []postgresflex.UserRole{ Roles: &[]postgresflex.UserRole{
"role_1", "role_1",
"role_2", "role_2",
"", "",
}, },
Name: "username", Name: utils.Ptr("username"),
}, },
testRegion, testRegion,
dataSourceModel{ dataSourceModel{
@ -78,10 +77,10 @@ func TestMapDataSourceFields(t *testing.T) {
{ {
"null_fields_and_int_conversions", "null_fields_and_int_conversions",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
Roles: []postgresflex.UserRole{}, Roles: &[]postgresflex.UserRole{},
Name: "", Name: nil,
Status: "status", Status: utils.Ptr("status"),
}, },
testRegion, testRegion,
dataSourceModel{ dataSourceModel{
@ -161,7 +160,7 @@ func TestMapFieldsCreate(t *testing.T) {
{ {
"default_values", "default_values",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
}, },
testRegion, testRegion,
resourceModel{ resourceModel{
@ -169,11 +168,11 @@ func TestMapFieldsCreate(t *testing.T) {
UserId: types.Int64Value(1), UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"), InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"), ProjectId: types.StringValue("pid"),
Name: types.StringValue(""), Name: types.StringNull(),
Roles: types.List(types.SetNull(types.StringType)), Roles: types.List(types.SetNull(types.StringType)),
Password: types.StringNull(), Password: types.StringNull(),
Region: types.StringValue(testRegion), Region: types.StringValue(testRegion),
Status: types.StringValue(""), Status: types.StringNull(),
//ConnectionString: types.StringNull(), //ConnectionString: types.StringNull(),
}, },
true, true,
@ -181,9 +180,9 @@ func TestMapFieldsCreate(t *testing.T) {
{ {
"simple_values", "simple_values",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "username", Name: utils.Ptr("username"),
Status: "status", Status: utils.Ptr("status"),
}, },
testRegion, testRegion,
resourceModel{ resourceModel{
@ -203,9 +202,9 @@ func TestMapFieldsCreate(t *testing.T) {
{ {
"null_fields_and_int_conversions", "null_fields_and_int_conversions",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "", Name: nil,
Status: "", Status: nil,
}, },
testRegion, testRegion,
resourceModel{ resourceModel{
@ -213,11 +212,11 @@ func TestMapFieldsCreate(t *testing.T) {
UserId: types.Int64Value(1), UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"), InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"), ProjectId: types.StringValue("pid"),
Name: types.StringValue(""), Name: types.StringNull(),
Roles: types.List(types.SetNull(types.StringType)), Roles: types.List(types.SetNull(types.StringType)),
Password: types.StringNull(), Password: types.StringNull(),
Region: types.StringValue(testRegion), Region: types.StringValue(testRegion),
Status: types.StringValue(""), Status: types.StringNull(),
//ConnectionString: types.StringNull(), //ConnectionString: types.StringNull(),
}, },
true, true,
@ -260,7 +259,7 @@ func TestMapFieldsCreate(t *testing.T) {
t.Fatalf("Should not have failed: %v", err) t.Fatalf("Should not have failed: %v", err)
} }
if tt.isValid { if tt.isValid {
diff := cmp.Diff(&tt.expected, state) diff := cmp.Diff(state, &tt.expected)
if diff != "" { if diff != "" {
t.Fatalf("Data does not match: %s", diff) t.Fatalf("Data does not match: %s", diff)
} }
@ -282,7 +281,7 @@ func TestMapFields(t *testing.T) {
{ {
"default_values", "default_values",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
}, },
testRegion, testRegion,
resourceModel{ resourceModel{
@ -290,10 +289,10 @@ func TestMapFields(t *testing.T) {
UserId: types.Int64Value(int64(1)), UserId: types.Int64Value(int64(1)),
InstanceId: types.StringValue("iid"), InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"), ProjectId: types.StringValue("pid"),
Name: types.StringValue(""), Name: types.StringNull(),
Roles: types.List(types.SetNull(types.StringType)), Roles: types.List(types.SetNull(types.StringType)),
Region: types.StringValue(testRegion), Region: types.StringValue(testRegion),
Status: types.StringValue(""), Status: types.StringNull(),
//ConnectionString: types.StringNull(), //ConnectionString: types.StringNull(),
}, },
true, true,
@ -301,13 +300,13 @@ func TestMapFields(t *testing.T) {
{ {
"simple_values", "simple_values",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
Roles: []postgresflex.UserRole{ Roles: &[]postgresflex.UserRole{
"role_1", "role_1",
"role_2", "role_2",
"", "",
}, },
Name: "username", Name: utils.Ptr("username"),
}, },
testRegion, testRegion,
resourceModel{ resourceModel{
@ -326,7 +325,7 @@ func TestMapFields(t *testing.T) {
), ),
), ),
Region: types.StringValue(testRegion), Region: types.StringValue(testRegion),
Status: types.StringValue(""), Status: types.StringNull(),
//ConnectionString: types.StringNull(), //ConnectionString: types.StringNull(),
}, },
true, true,
@ -334,8 +333,8 @@ func TestMapFields(t *testing.T) {
{ {
"null_fields_and_int_conversions", "null_fields_and_int_conversions",
&postgresflex.GetUserResponse{ &postgresflex.GetUserResponse{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "", Name: nil,
}, },
testRegion, testRegion,
resourceModel{ resourceModel{
@ -343,10 +342,10 @@ func TestMapFields(t *testing.T) {
UserId: types.Int64Value(1), UserId: types.Int64Value(1),
InstanceId: types.StringValue("iid"), InstanceId: types.StringValue("iid"),
ProjectId: types.StringValue("pid"), ProjectId: types.StringValue("pid"),
Name: types.StringValue(""), Name: types.StringNull(),
Roles: types.List(types.SetNull(types.StringType)), Roles: types.List(types.SetNull(types.StringType)),
Region: types.StringValue(testRegion), Region: types.StringValue(testRegion),
Status: types.StringValue(""), Status: types.StringNull(),
//ConnectionString: types.StringNull(), //ConnectionString: types.StringNull(),
}, },
true, true,
@ -402,17 +401,17 @@ func TestToCreatePayload(t *testing.T) {
tests := []struct { tests := []struct {
description string description string
input *resourceModel input *resourceModel
inputRoles []string inputRoles *[]string
expected *postgresflex.CreateUserRequestPayload expected *postgresflex.CreateUserRequestPayload
isValid bool isValid bool
}{ }{
{ {
"default_values", "default_values",
&resourceModel{}, &resourceModel{},
[]string{}, &[]string{},
&postgresflex.CreateUserRequestPayload{ &postgresflex.CreateUserRequestPayload{
Name: "", Name: nil,
Roles: []postgresflex.UserRole{}, Roles: &[]postgresflex.UserRole{},
}, },
true, true,
}, },
@ -421,13 +420,13 @@ func TestToCreatePayload(t *testing.T) {
&resourceModel{ &resourceModel{
Name: types.StringValue("username"), Name: types.StringValue("username"),
}, },
[]string{ &[]string{
"role_1", "role_1",
"role_2", "role_2",
}, },
&postgresflex.CreateUserRequestPayload{ &postgresflex.CreateUserRequestPayload{
Name: "username", Name: utils.Ptr("username"),
Roles: []postgresflex.UserRole{ Roles: &[]postgresflex.UserRole{
"role_1", "role_1",
"role_2", "role_2",
}, },
@ -439,21 +438,21 @@ func TestToCreatePayload(t *testing.T) {
&resourceModel{ &resourceModel{
Name: types.StringNull(), Name: types.StringNull(),
}, },
[]string{ &[]string{
"", "",
}, },
&postgresflex.CreateUserRequestPayload{ &postgresflex.CreateUserRequestPayload{
Roles: []postgresflex.UserRole{ Roles: &[]postgresflex.UserRole{
"", "",
}, },
Name: "", Name: nil,
}, },
true, true,
}, },
{ {
"nil_model", "nil_model",
nil, nil,
[]string{}, &[]string{},
nil, nil,
false, false,
}, },
@ -490,16 +489,16 @@ func TestToUpdatePayload(t *testing.T) {
tests := []struct { tests := []struct {
description string description string
input *resourceModel input *resourceModel
inputRoles []string inputRoles *[]string
expected *postgresflex.UpdateUserRequestPayload expected *postgresflex.UpdateUserRequestPayload
isValid bool isValid bool
}{ }{
{ {
"default_values", "default_values",
&resourceModel{}, &resourceModel{},
[]string{}, &[]string{},
&postgresflex.UpdateUserRequestPayload{ &postgresflex.UpdateUserRequestPayload{
Roles: []postgresflex.UserRole{}, Roles: &[]postgresflex.UserRole{},
}, },
true, true,
}, },
@ -508,13 +507,13 @@ func TestToUpdatePayload(t *testing.T) {
&resourceModel{ &resourceModel{
Name: types.StringValue("username"), Name: types.StringValue("username"),
}, },
[]string{ &[]string{
"role_1", "role_1",
"role_2", "role_2",
}, },
&postgresflex.UpdateUserRequestPayload{ &postgresflex.UpdateUserRequestPayload{
Name: utils.Ptr("username"), Name: utils.Ptr("username"),
Roles: []postgresflex.UserRole{ Roles: &[]postgresflex.UserRole{
"role_1", "role_1",
"role_2", "role_2",
}, },
@ -526,11 +525,11 @@ func TestToUpdatePayload(t *testing.T) {
&resourceModel{ &resourceModel{
Name: types.StringNull(), Name: types.StringNull(),
}, },
[]string{ &[]string{
"", "",
}, },
&postgresflex.UpdateUserRequestPayload{ &postgresflex.UpdateUserRequestPayload{
Roles: []postgresflex.UserRole{ Roles: &[]postgresflex.UserRole{
"", "",
}, },
}, },
@ -539,7 +538,7 @@ func TestToUpdatePayload(t *testing.T) {
{ {
"nil_model", "nil_model",
nil, nil,
[]string{}, &[]string{},
nil, nil,
false, false,
}, },

View file

@ -12,8 +12,8 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen" postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha" postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
@ -60,7 +60,7 @@ type UserResourceIdentityModel struct {
// userResource implements the resource handling for a PostgreSQL Flex user. // userResource implements the resource handling for a PostgreSQL Flex user.
type userResource struct { type userResource struct {
client *v3alpha1api.APIClient client *postgresflex.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -189,8 +189,8 @@ func (r *userResource) Create(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
arg := &clientArg{ arg := &clientArg{
projectID: model.ProjectId.ValueString(), projectId: model.ProjectId.ValueString(),
instanceID: model.InstanceId.ValueString(), instanceId: model.InstanceId.ValueString(),
region: r.providerData.GetRegionWithOverride(model.Region), region: r.providerData.GetRegionWithOverride(model.Region),
} }
@ -202,18 +202,18 @@ func (r *userResource) Create(
} }
// Generate API request body from model // Generate API request body from model
payload, err := toCreatePayload(&model, roles) payload, err := toCreatePayload(&model, &roles)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
return return
} }
// Create new user // Create new user
userResp, err := r.client.DefaultAPI.CreateUserRequest( userResp, err := r.client.CreateUserRequest(
ctx, ctx,
arg.projectID, arg.projectId,
arg.region, arg.region,
arg.instanceID, arg.instanceId,
).CreateUserRequestPayload(*payload).Execute() ).CreateUserRequestPayload(*payload).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
@ -221,7 +221,7 @@ func (r *userResource) Create(
} }
id, ok := userResp.GetIdOk() id, ok := userResp.GetIdOk()
if !ok || *id == 0 { if !ok || id == 0 {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -230,7 +230,7 @@ func (r *userResource) Create(
) )
return return
} }
arg.userID = int64(*id) arg.userId = id
ctx = tflog.SetField(ctx, "user_id", id) ctx = tflog.SetField(ctx, "user_id", id)
@ -238,28 +238,29 @@ func (r *userResource) Create(
// Set data returned by API in identity // Set data returned by API in identity
identity := UserResourceIdentityModel{ identity := UserResourceIdentityModel{
ProjectID: types.StringValue(arg.projectID), ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region), Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceID), InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int64Value(int64(*id)), UserID: types.Int64Value(id),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
return return
} }
model.Id = types.Int64Value(int64(*id)) model.Id = types.Int64Value(id)
model.UserId = types.Int64Value(int64(*id)) model.UserId = types.Int64Value(id)
model.Password = types.StringValue(userResp.GetPassword()) model.Password = types.StringValue(userResp.GetPassword())
model.Status = types.StringValue(userResp.GetStatus()) model.Status = types.StringValue(userResp.GetStatus())
//model.ConnectionString = types.StringValue(userResp.GetConnectionString())
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx, ctx,
r.client.DefaultAPI, r.client,
arg.projectID, arg.projectId,
arg.instanceID, arg.instanceId,
arg.region, arg.region,
int64(*id), id,
).SetSleepBeforeWait( ).SetSleepBeforeWait(
10 * time.Second, 10 * time.Second,
).SetTimeout( ).SetTimeout(
@ -276,7 +277,7 @@ func (r *userResource) Create(
return return
} }
if waitResp.Id == 0 { if waitResp.Id == nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -285,7 +286,7 @@ func (r *userResource) Create(
) )
return return
} }
if waitResp.Id != *id { if waitResp.Id == nil || *waitResp.Id != id {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -324,8 +325,8 @@ func (r *userResource) Read(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
arg := &clientArg{ arg := &clientArg{
projectID: model.ProjectId.ValueString(), projectId: model.ProjectId.ValueString(),
instanceID: model.InstanceId.ValueString(), instanceId: model.InstanceId.ValueString(),
region: r.providerData.GetRegionWithOverride(model.Region), region: r.providerData.GetRegionWithOverride(model.Region),
} }
@ -336,9 +337,9 @@ func (r *userResource) Read(
// Read resource state // Read resource state
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx, ctx,
r.client.DefaultAPI, r.client,
arg.projectID, arg.projectId,
arg.instanceID, arg.instanceId,
arg.region, arg.region,
model.UserId.ValueInt64(), model.UserId.ValueInt64(),
).SetSleepBeforeWait( ).SetSleepBeforeWait(
@ -357,7 +358,7 @@ func (r *userResource) Read(
return return
} }
if int64(waitResp.Id) != model.UserId.ValueInt64() { if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -366,16 +367,16 @@ func (r *userResource) Read(
) )
return return
} }
arg.userID = int64(waitResp.Id) arg.userId = *waitResp.Id
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
// Set data returned by API in identity // Set data returned by API in identity
identity := UserResourceIdentityModel{ identity := UserResourceIdentityModel{
ProjectID: types.StringValue(arg.projectID), ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region), Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceID), InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int64Value(arg.userID), UserID: types.Int64Value(arg.userId),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
@ -407,8 +408,8 @@ func (r *userResource) Update(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
arg := &clientArg{ arg := &clientArg{
projectID: model.ProjectId.ValueString(), projectId: model.ProjectId.ValueString(),
instanceID: model.InstanceId.ValueString(), instanceId: model.InstanceId.ValueString(),
region: r.providerData.GetRegionWithOverride(model.Region), region: r.providerData.GetRegionWithOverride(model.Region),
} }
@ -429,26 +430,26 @@ func (r *userResource) Update(
} }
// Generate API request body from model // Generate API request body from model
payload, err := toUpdatePayload(&model, roles) payload, err := toUpdatePayload(&model, &roles)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err))
return return
} }
userID64 := arg.userID userId64 := arg.userId
if userID64 > math.MaxInt32 { if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)") core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return return
} }
userID := int32(userID64) // nolint:gosec // check is performed above userId := int32(userId64) // nolint:gosec // check is performed above
// Update existing instance // Update existing instance
err = r.client.DefaultAPI.UpdateUserRequest( err = r.client.UpdateUserRequest(
ctx, ctx,
arg.projectID, arg.projectId,
arg.region, arg.region,
arg.instanceID, arg.instanceId,
userID, userId,
).UpdateUserRequestPayload(*payload).Execute() ).UpdateUserRequestPayload(*payload).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", err.Error()) core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", err.Error())
@ -459,10 +460,10 @@ func (r *userResource) Update(
// Set data returned by API in identity // Set data returned by API in identity
identity := UserResourceIdentityModel{ identity := UserResourceIdentityModel{
ProjectID: types.StringValue(arg.projectID), ProjectID: types.StringValue(arg.projectId),
Region: types.StringValue(arg.region), Region: types.StringValue(arg.region),
InstanceID: types.StringValue(arg.instanceID), InstanceID: types.StringValue(arg.instanceId),
UserID: types.Int64Value(userID64), UserID: types.Int64Value(userId64),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
@ -472,9 +473,9 @@ func (r *userResource) Update(
// Verify update // Verify update
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler( waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
ctx, ctx,
r.client.DefaultAPI, r.client,
arg.projectID, arg.projectId,
arg.instanceID, arg.instanceId,
arg.region, arg.region,
model.UserId.ValueInt64(), model.UserId.ValueInt64(),
).SetSleepBeforeWait( ).SetSleepBeforeWait(
@ -493,7 +494,7 @@ func (r *userResource) Update(
return return
} }
if int64(waitResp.Id) != model.UserId.ValueInt64() { if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -502,7 +503,7 @@ func (r *userResource) Update(
) )
return return
} }
arg.userID = int64(waitResp.Id) arg.userId = *waitResp.Id
// Set state to fully populated data // Set state to fully populated data
diags = resp.State.Set(ctx, stateModel) diags = resp.State.Set(ctx, stateModel)
@ -547,15 +548,15 @@ func (r *userResource) Delete(
ctx = r.setTFLogFields(ctx, arg) ctx = r.setTFLogFields(ctx, arg)
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
userID64 := arg.userID userId64 := arg.userId
if userID64 > math.MaxInt32 { if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)") core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return return
} }
userID := int32(userID64) // nolint:gosec // check is performed above userId := int32(userId64) // nolint:gosec // check is performed above
// Delete existing record set // Delete existing record set
err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectID, arg.region, arg.instanceID, userID).Execute() err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
} }
@ -571,7 +572,7 @@ func (r *userResource) Delete(
// if exists { // if exists {
// core.LogAndAddError( // core.LogAndAddError(
// ctx, &resp.Diagnostics, "Error deleting user", // ctx, &resp.Diagnostics, "Error deleting user",
// fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt32()), // fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()),
// ) // )
// return // return
//} //}
@ -607,10 +608,10 @@ func (r *userResource) IdentitySchema(
// clientArg holds the arguments for API calls. // clientArg holds the arguments for API calls.
type clientArg struct { type clientArg struct {
projectID string projectId string
instanceID string instanceId string
region string region string
userID int64 userId int64
} }
// ImportState imports a resource into the Terraform state on success. // ImportState imports a resource into the Terraform state on success.
@ -637,7 +638,7 @@ func (r *userResource) ImportState(
return return
} }
userID, err := strconv.ParseInt(idParts[3], 10, 64) userId, err := strconv.ParseInt(idParts[3], 10, 64)
if err != nil { if err != nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
@ -651,7 +652,7 @@ func (r *userResource) ImportState(
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
tflog.Info(ctx, "Postgres Flex user state imported") tflog.Info(ctx, "Postgres Flex user state imported")
@ -665,15 +666,15 @@ func (r *userResource) ImportState(
return return
} }
projectID := identityData.ProjectID.ValueString() projectId := identityData.ProjectID.ValueString()
region := identityData.Region.ValueString() region := identityData.Region.ValueString()
instanceID := identityData.InstanceID.ValueString() instanceId := identityData.InstanceID.ValueString()
userID := identityData.UserID.ValueInt64() userId := identityData.UserID.ValueInt64()
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...) resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
tflog.Info(ctx, "Postgres Flex user state imported") tflog.Info(ctx, "Postgres Flex user state imported")
} }
@ -683,24 +684,25 @@ func (r *userResource) extractIdentityData(
model resourceModel, model resourceModel,
identity UserResourceIdentityModel, identity UserResourceIdentityModel,
) (*clientArg, error) { ) (*clientArg, error) {
var projectID, region, instanceID string var projectId, region, instanceId string
var userID int64 var userId int64
if !model.UserId.IsNull() && !model.UserId.IsUnknown() { if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
userID = model.UserId.ValueInt64() userId = model.UserId.ValueInt64()
} else { } else {
if identity.UserID.IsNull() || identity.UserID.IsUnknown() { if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
return nil, fmt.Errorf("user_id not found in config") return nil, fmt.Errorf("user_id not found in config")
} }
userID = identity.UserID.ValueInt64() userId = identity.UserID.ValueInt64()
} }
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() { if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
projectID = model.ProjectId.ValueString() projectId = model.ProjectId.ValueString()
} else { } else {
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() { if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
return nil, fmt.Errorf("project_id not found in config") return nil, fmt.Errorf("project_id not found in config")
} }
projectID = identity.ProjectID.ValueString() projectId = identity.ProjectID.ValueString()
} }
if !model.Region.IsNull() && !model.Region.IsUnknown() { if !model.Region.IsNull() && !model.Region.IsUnknown() {
@ -713,27 +715,27 @@ func (r *userResource) extractIdentityData(
} }
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() { if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
instanceID = model.InstanceId.ValueString() instanceId = model.InstanceId.ValueString()
} else { } else {
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() { if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
return nil, fmt.Errorf("instance_id not found in config") return nil, fmt.Errorf("instance_id not found in config")
} }
instanceID = identity.InstanceID.ValueString() instanceId = identity.InstanceID.ValueString()
} }
return &clientArg{ return &clientArg{
projectID: projectID, projectId: projectId,
instanceID: instanceID, instanceId: instanceId,
region: region, region: region,
userID: userID, userId: userId,
}, nil }, nil
} }
// setTFLogFields adds relevant fields to the context for terraform logging purposes. // setTFLogFields adds relevant fields to the context for terraform logging purposes.
func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context { func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context {
ctx = tflog.SetField(ctx, "project_id", arg.projectID) ctx = tflog.SetField(ctx, "project_id", arg.projectId)
ctx = tflog.SetField(ctx, "instance_id", arg.instanceID) ctx = tflog.SetField(ctx, "instance_id", arg.instanceId)
ctx = tflog.SetField(ctx, "region", arg.region) ctx = tflog.SetField(ctx, "region", arg.region)
ctx = tflog.SetField(ctx, "user_id", arg.userID) ctx = tflog.SetField(ctx, "user_id", arg.userId)
return ctx return ctx
} }

View file

@ -9,7 +9,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/core/config" "github.com/stackitcloud/stackit-sdk-go/core/config"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"

View file

@ -15,7 +15,7 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
const ( const (
@ -38,7 +38,7 @@ func TestConfigureClient(t *testing.T) {
name string name string
args args args args
wantErr bool wantErr bool
expected *v3alpha1api.APIClient expected *postgresflex.APIClient
}{ }{
{ {
name: "default endpoint", name: "default endpoint",
@ -47,8 +47,8 @@ func TestConfigureClient(t *testing.T) {
Version: testVersion, Version: testVersion,
}, },
}, },
expected: func() *v3alpha1api.APIClient { expected: func() *postgresflex.APIClient {
apiClient, err := v3alpha1api.NewAPIClient( apiClient, err := postgresflex.NewAPIClient(
config.WithRegion("eu01"), config.WithRegion("eu01"),
utils.UserAgentConfigOption(testVersion), utils.UserAgentConfigOption(testVersion),
) )
@ -67,8 +67,8 @@ func TestConfigureClient(t *testing.T) {
PostgresFlexCustomEndpoint: testCustomEndpoint, PostgresFlexCustomEndpoint: testCustomEndpoint,
}, },
}, },
expected: func() *v3alpha1api.APIClient { expected: func() *postgresflex.APIClient {
apiClient, err := v3alpha1api.NewAPIClient( apiClient, err := postgresflex.NewAPIClient(
utils.UserAgentConfigOption(testVersion), utils.UserAgentConfigOption(testVersion),
config.WithEndpoint(testCustomEndpoint), config.WithEndpoint(testCustomEndpoint),
) )

View file

@ -16,8 +16,7 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api" sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
) )
@ -120,7 +119,7 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
databaseName := data.DatabaseName.ValueString() databaseName := data.DatabaseName.ValueString()
databaseResp, err := d.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
if err != nil { if err != nil {
handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId) handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
resp.State.RemoveResource(ctx) resp.State.RemoveResource(ctx)
@ -143,7 +142,8 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
// Save data into Terraform state // Save data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
tflog.Info(ctx, "SQL Server Flex Alpha database read") tflog.Info(ctx, "SQL Server Flex beta database read")
} }
// handleReadError centralizes API error handling for the Read operation. // handleReadError centralizes API error handling for the Read operation.

View file

@ -5,10 +5,8 @@ import (
"strings" "strings"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
) )
@ -17,7 +15,7 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource
if source == nil { if source == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
if source.Id == 0 { if source.Id == nil || *source.Id == 0 {
return fmt.Errorf("id not present") return fmt.Errorf("id not present")
} }
if model == nil { if model == nil {
@ -27,8 +25,8 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource
var databaseId int64 var databaseId int64
if model.Id.ValueInt64() != 0 { if model.Id.ValueInt64() != 0 {
databaseId = model.Id.ValueInt64() databaseId = model.Id.ValueInt64()
} else if source.Id != 0 { } else if source.Id != nil {
databaseId = source.Id databaseId = *source.Id
} else { } else {
return fmt.Errorf("database id not present") return fmt.Errorf("database id not present")
} }
@ -40,7 +38,7 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource
model.Region = types.StringValue(region) model.Region = types.StringValue(region)
model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.ProjectId = types.StringValue(model.ProjectId.ValueString())
model.InstanceId = types.StringValue(model.InstanceId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString())
model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel())) model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
model.CollationName = types.StringValue(source.GetCollationName()) model.CollationName = types.StringValue(source.GetCollationName())
model.TerraformId = utils.BuildInternalTerraformId( model.TerraformId = utils.BuildInternalTerraformId(
@ -58,7 +56,7 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re
if source == nil { if source == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
if source.Id == 0 { if source.Id == nil || *source.Id == 0 {
return fmt.Errorf("id not present") return fmt.Errorf("id not present")
} }
if model == nil { if model == nil {
@ -68,8 +66,8 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re
var databaseId int64 var databaseId int64
if model.Id.ValueInt64() != 0 { if model.Id.ValueInt64() != 0 {
databaseId = model.Id.ValueInt64() databaseId = model.Id.ValueInt64()
} else if source.Id != 0 { } else if source.Id != nil {
databaseId = source.Id databaseId = *source.Id
} else { } else {
return fmt.Errorf("database id not present") return fmt.Errorf("database id not present")
} }
@ -82,8 +80,8 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re
model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.ProjectId = types.StringValue(model.ProjectId.ValueString())
model.InstanceId = types.StringValue(model.InstanceId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString())
model.Compatibility = types.Int64Value(int64(source.GetCompatibilityLevel())) model.Compatibility = types.Int64Value(source.GetCompatibilityLevel())
model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel())) model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
model.CollationName = types.StringValue(source.GetCollationName()) model.CollationName = types.StringValue(source.GetCollationName())
@ -98,9 +96,9 @@ func toCreatePayload(model *resourceModel) (*sqlserverflexalpha.CreateDatabaseRe
} }
return &sqlserverflexalpha.CreateDatabaseRequestPayload{ return &sqlserverflexalpha.CreateDatabaseRequestPayload{
Name: model.Name.ValueString(), Name: model.Name.ValueStringPointer(),
Owner: model.Owner.ValueString(), Owner: model.Owner.ValueStringPointer(),
Collation: model.Collation.ValueStringPointer(), Collation: model.Collation.ValueStringPointer(),
Compatibility: coreUtils.Ptr(int32(model.Compatibility.ValueInt64())), //nolint:gosec // TODO Compatibility: model.Compatibility.ValueInt64Pointer(),
}, nil }, nil
} }

View file

@ -6,8 +6,8 @@ import (
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils" "github.com/stackitcloud/stackit-sdk-go/core/utils"
sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen" datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
) )
@ -31,11 +31,11 @@ func TestMapFields(t *testing.T) {
name: "should map fields correctly", name: "should map fields correctly",
given: given{ given: given{
source: &sqlserverflexalpha.GetDatabaseResponse{ source: &sqlserverflexalpha.GetDatabaseResponse{
Id: (int64(1)), Id: utils.Ptr(int64(1)),
Name: ("my-db"), Name: utils.Ptr("my-db"),
CollationName: ("collation"), CollationName: utils.Ptr("collation"),
CompatibilityLevel: (int32(150)), CompatibilityLevel: utils.Ptr(int64(150)),
Owner: ("my-owner"), Owner: utils.Ptr("my-owner"),
}, },
model: &dataSourceModel{ model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
@ -73,7 +73,7 @@ func TestMapFields(t *testing.T) {
{ {
name: "should fail on nil source ID", name: "should fail on nil source ID",
given: given{ given: given{
source: &sqlserverflexalpha.GetDatabaseResponse{Id: 0}, source: &sqlserverflexalpha.GetDatabaseResponse{Id: nil},
model: &dataSourceModel{}, model: &dataSourceModel{},
}, },
expected: expected{err: true}, expected: expected{err: true},
@ -81,7 +81,7 @@ func TestMapFields(t *testing.T) {
{ {
name: "should fail on nil model", name: "should fail on nil model",
given: given{ given: given{
source: &sqlserverflexalpha.GetDatabaseResponse{Id: (int64(1))}, source: &sqlserverflexalpha.GetDatabaseResponse{Id: utils.Ptr(int64(1))},
model: nil, model: nil,
}, },
expected: expected{err: true}, expected: expected{err: true},
@ -125,9 +125,9 @@ func TestMapResourceFields(t *testing.T) {
name: "should map fields correctly", name: "should map fields correctly",
given: given{ given: given{
source: &sqlserverflexalpha.GetDatabaseResponse{ source: &sqlserverflexalpha.GetDatabaseResponse{
Id: (int64(1)), Id: utils.Ptr(int64(1)),
Name: ("my-db"), Name: utils.Ptr("my-db"),
Owner: ("my-owner"), Owner: utils.Ptr("my-owner"),
}, },
model: &resourceModel{ model: &resourceModel{
ProjectId: types.StringValue("my-project"), ProjectId: types.StringValue("my-project"),
@ -202,9 +202,8 @@ func TestToCreatePayload(t *testing.T) {
}, },
expected: expected{ expected: expected{
payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{ payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: "my-owner", Owner: utils.Ptr("my-owner"),
Compatibility: utils.Ptr(int32(0)),
}, },
}, },
}, },

View file

@ -16,10 +16,8 @@ import (
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/config" "github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror" "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha" wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
@ -38,6 +36,10 @@ var (
// Define errors // Define errors
errDatabaseNotFound = errors.New("database not found") errDatabaseNotFound = errors.New("database not found")
// Error message constants
extractErrorSummary = "extracting failed"
extractErrorMessage = "Extracting identity data: %v"
) )
func NewDatabaseResource() resource.Resource { func NewDatabaseResource() resource.Resource {
@ -178,13 +180,33 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
} }
if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() { if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() {
payLoad.Compatibility = coreUtils.Ptr(int32(data.Compatibility.ValueInt64())) //nolint:gosec // TODO payLoad.Compatibility = data.Compatibility.ValueInt64Pointer()
} }
payLoad.Name = data.Name.ValueString() payLoad.Name = data.Name.ValueStringPointer()
payLoad.Owner = data.Owner.ValueString() payLoad.Owner = data.Owner.ValueStringPointer()
createResp, err := r.client.DefaultAPI.CreateDatabaseRequest(ctx, projectId, region, instanceId). //_, err := wait.WaitForUserWaitHandler(
// ctx,
// r.client,
// projectId,
// instanceId,
// region,
// data.Owner.ValueString(),
//).
// SetSleepBeforeWait(10 * time.Second).
// WaitWithContext(ctx)
//if err != nil {
// core.LogAndAddError(
// ctx,
// &resp.Diagnostics,
// createErr,
// fmt.Sprintf("Calling API: %v", err),
// )
// return
//}
createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
CreateDatabaseRequestPayload(payLoad). CreateDatabaseRequestPayload(payLoad).
Execute() Execute()
if err != nil { if err != nil {
@ -197,7 +219,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
if createResp == nil || createResp.Id == 0 { if createResp == nil || createResp.Id == nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -207,7 +229,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
databaseId := createResp.Id databaseId := *createResp.Id
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
@ -228,7 +250,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
// TODO: is this necessary to wait for the database-> API say 200 ? // TODO: is this necessary to wait for the database-> API say 200 ?
waitResp, err := wait.CreateDatabaseWaitHandler( waitResp, err := wait.CreateDatabaseWaitHandler(
ctx, ctx,
r.client.DefaultAPI, r.client,
projectId, projectId,
instanceId, instanceId,
region, region,
@ -248,7 +270,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
if waitResp.Id == 0 { if waitResp.Id == nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -258,7 +280,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
if waitResp.Id != databaseId { if *waitResp.Id != databaseId {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -268,7 +290,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
if waitResp.Owner != data.Owner.ValueString() { if *waitResp.Owner != data.Owner.ValueString() {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -278,7 +300,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
if waitResp.Name != data.Name.ValueString() { if *waitResp.Name != data.Name.ValueString() {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
@ -288,7 +310,7 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return return
} }
database, err := r.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() database, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
if err != nil { if err != nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
@ -330,6 +352,13 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r
return return
} }
// Read identity data
var identityData DatabaseResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString() projectId := model.ProjectId.ValueString()
@ -342,7 +371,7 @@ func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, r
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_name", databaseName) ctx = tflog.SetField(ctx, "database_name", databaseName)
databaseResp, err := r.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() databaseResp, err := r.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
if err != nil { if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) { if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
@ -422,16 +451,14 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques
ctx = tflog.SetField(ctx, "database_name", databaseName) ctx = tflog.SetField(ctx, "database_name", databaseName)
// Delete existing record set // Delete existing record set
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute() err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseName)
if err != nil { if err != nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
"Error deleting database", "Error deleting database",
fmt.Sprintf( fmt.Sprintf(
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId, "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId))
),
)
return return
} }
@ -448,6 +475,7 @@ func (r *databaseResource) ModifyPlan(
req resource.ModifyPlanRequest, req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse, resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform ) { // nolint:gocritic // function signature required by Terraform
// skip initial empty configuration to avoid follow-up errors // skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() { if req.Config.Raw.IsNull() {
return return

View file

@ -16,8 +16,7 @@ import (
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api" sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen" sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen"
) )
@ -274,7 +273,7 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region) flavors, err := getAllFlavors(ctx, r.client, projectId, region)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return return
@ -282,17 +281,17 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
var foundFlavors []sqlserverflexalphaPkg.ListFlavors var foundFlavors []sqlserverflexalphaPkg.ListFlavors
for _, flavor := range flavors { for _, flavor := range flavors {
if model.Cpu.ValueInt64() != flavor.Cpu { if model.Cpu.ValueInt64() != *flavor.Cpu {
continue continue
} }
if model.Memory.ValueInt64() != flavor.Memory { if model.Memory.ValueInt64() != *flavor.Memory {
continue continue
} }
if model.NodeType.ValueString() != flavor.NodeType { if model.NodeType.ValueString() != *flavor.NodeType {
continue continue
} }
for _, sc := range flavor.StorageClasses { for _, sc := range *flavor.StorageClasses {
if model.StorageClass.ValueString() != sc.Class { if model.StorageClass.ValueString() != *sc.Class {
continue continue
} }
foundFlavors = append(foundFlavors, flavor) foundFlavors = append(foundFlavors, flavor)
@ -308,11 +307,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
} }
f := foundFlavors[0] f := foundFlavors[0]
model.Description = types.StringValue(f.Description) model.Description = types.StringValue(*f.Description)
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id) model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
model.FlavorId = types.StringValue(f.Id) model.FlavorId = types.StringValue(*f.Id)
model.MaxGb = types.Int64Value(int64(f.MaxGB)) model.MaxGb = types.Int64Value(*f.MaxGB)
model.MinGb = types.Int64Value(int64(f.MinGB)) model.MinGb = types.Int64Value(*f.MinGB)
if f.StorageClasses == nil { if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{ model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{
@ -322,15 +321,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}) })
} else { } else {
var scList []attr.Value var scList []attr.Value
for _, sc := range f.StorageClasses { for _, sc := range *f.StorageClasses {
scList = append( scList = append(
scList, scList,
sqlserverflexalphaGen.NewStorageClassesValueMust( sqlserverflexalphaGen.NewStorageClassesValueMust(
sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"class": types.StringValue(sc.Class), "class": types.StringValue(*sc.Class),
"max_io_per_sec": types.Int64Value(int64(sc.MaxIoPerSec)), "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
"max_through_in_mb": types.Int64Value(int64(sc.MaxThroughInMb)), "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
}, },
), ),
) )

Some files were not shown because too many files have changed in this diff Show more