diff --git a/.github/actions/acc_test/README.md b/.github/actions/acc_test/README.md
deleted file mode 100644
index c3484cf2..00000000
--- a/.github/actions/acc_test/README.md
+++ /dev/null
@@ -1 +0,0 @@
-# acceptance test action
diff --git a/.github/actions/acc_test/action.yaml b/.github/actions/acc_test/action.yaml
deleted file mode 100644
index ccd08969..00000000
--- a/.github/actions/acc_test/action.yaml
+++ /dev/null
@@ -1,262 +0,0 @@
-name: Acceptance Testing
-description: "Acceptance Testing pipeline"
-
-inputs:
- test_timeout_string:
- description: "string that determines the timeout (default: 45m)"
- default: '45m'
- required: true
-
- go-version:
- description: "go version to install"
- default: '1.25'
- required: true
-
- project_id:
- description: "STACKIT project ID for tests"
- required: true
-
- project_user_email:
- required: true
- description: "project user email for acc testing"
-
- tf_acc_kek_key_id:
- description: "KEK key ID"
- required: true
-
- tf_acc_kek_key_ring_id:
- description: "KEK key ring ID"
- required: true
-
- tf_acc_kek_key_version:
- description: "KEK key version"
- required: true
-
- tf_acc_kek_service_account:
- description: "KEK service account email"
- required: true
-
- region:
- description: "STACKIT region for tests"
- default: 'eu01'
- required: true
-
- service_account_json_content:
- description: "STACKIT service account JSON file contents"
- required: true
- default: ""
-
- service_account_json_content_b64:
- description: "STACKIT service account JSON file contents"
- required: true
- default: ""
-
- service_account_json_file_path:
- description: "STACKIT service account JSON file contents"
- required: true
- default: 'service_account.json'
-
- test_file:
- description: "testfile to run"
- default: ''
-
-
-#outputs:
-# random-number:
-# description: "Random number"
-# value: ${{ steps.random-number-generator.outputs.random-number }}
-
-runs:
- using: "composite"
- steps:
-# - name: Random Number Generator
-# id: random-number-generator
-# run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
-# shell: bash
-
- - name: Install needed tools
- shell: bash
- run: |
- echo "::group::apt install"
- set -e
- apt-get -y -qq update >apt_update.log 2>apt_update_err.log
- if [ $? -ne 0 ]; then
- cat apt_update.log apt_update_err.log
- fi
- apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget >apt_get.log 2>apt_get_err.log
- if [ $? -ne 0 ]; then
- cat apt_get.log apt_get_err.log
- fi
- echo "::endgroup::"
-
- - name: Setup JAVA
- uses: actions/setup-java@v5
- with:
- distribution: 'temurin' # See 'Supported distributions' for available options
- java-version: '21'
-
- - name: Install Go ${{ inputs.go-version }}
- uses: actions/setup-go@v6
- with:
- # go-version: ${{ inputs.go-version }}
- check-latest: true
- go-version-file: 'go.mod'
-
- - name: Determine GOMODCACHE
- shell: bash
- id: goenv
- run: |
- set -e
- echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
-
- - name: Restore cached GO pkg
- id: cache-gopkg
- uses: actions/cache/restore@v5
- with:
- path: "${{ steps.goenv.outputs.gomodcache }}"
- key: ${{ runner.os }}-gopkg
-
- - name: Install go tools
- if: steps.cache-gopkg.outputs.cache-hit != 'true'
- shell: bash
- run: |
- echo "::group::go install"
- set -e
- go mod download
- go install golang.org/x/tools/cmd/goimports@latest
- go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
- go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
- go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
- echo "::endgroup::"
- - name: Run go mod tidy
- shell: bash
- run: go mod tidy
-
- - name: Save GO package Cache
- id: cache-gopkg-save
- uses: actions/cache/save@v5
- with:
- path: |
- ${{ steps.goenv.outputs.gomodcache }}
- key: ${{ runner.os }}-gopkg
-
- - name: Creating service_account file from json input
- if: inputs.service_account_json_content != ''
- shell: bash
- run: |
- echo "::group::create service account file"
- set -e
- set -o pipefail
-
- jsonFile="${{ inputs.service_account_json_file_path }}"
- jsonFile="${jsonFile:-x}"
- if [ "${jsonFile}" == "x" ]; then
- echo "no service account file path provided"
- exit 1
- fi
-
- if [ ! -f "${jsonFile}" ]; then
- echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
- echo "${{ inputs.service_account_json_content }}" > stackit/"${{ inputs.service_account_json_file_path }}"
- fi
- ls -l stackit/"${{ inputs.service_account_json_file_path }}"
- echo "::endgroup::"
-
- - name: Creating service_account file from base64 json input
- if: inputs.service_account_json_content_b64 != ''
- shell: bash
- run: |
- echo "::group::create service account file"
- set -e
- set -o pipefail
-
- jsonFile="${{ inputs.service_account_json_file_path }}"
- jsonFile="${jsonFile:-x}"
- if [ "${jsonFile}" == "x" ]; then
- echo "no service account file path provided"
- exit 1
- fi
-
- if [ ! -f "${jsonFile}" ]; then
- echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
- echo "${{ inputs.service_account_json_content_b64 }}" | base64 -d > stackit/"${{ inputs.service_account_json_file_path }}"
- fi
- ls -l stackit/"${{ inputs.service_account_json_file_path }}"
- echo "::endgroup::"
-
- - name: Run acceptance test file
- if: ${{ inputs.test_file != '' }}
- shell: bash
- run: |
- echo "::group::go test file"
- set -e
- set -o pipefail
-
- echo "Running acceptance tests for the terraform provider"
- cd stackit || exit 1
- TF_ACC=1 \
- TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
- TF_ACC_REGION=${TF_ACC_REGION} \
- TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
- TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
- TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
- TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
- TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
- TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
- go test ${{ inputs.test_file }} -count=1 -timeout=${{ inputs.test_timeout_string }}
- echo "::endgroup::"
- env:
- TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
- TF_ACC_REGION: ${{ inputs.region }}
- TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
- TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
- TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
- TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
- TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
-
-# - name: Run test action
-# if: ${{ inputs.test_file == '' }}
-# env:
-# TF_ACC: 1
-# TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
-# TF_ACC_REGION: ${{ inputs.region }}
-# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
-# TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
-# TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
-# TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
-# TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
-# TF_ACC_SERVICE_ACCOUNT_FILE: "${PWD}/${{ inputs.service_account_json_file_path }}"
-# uses: robherley/go-test-action@v0.1.0
-# with:
-# testArguments: "./... -timeout 45m"
-
- - name: Run acceptance tests
- if: ${{ inputs.test_file == '' }}
- shell: bash
- run: |
- echo "::group::go test all"
- set -e
- set -o pipefail
-
- echo "Running acceptance tests for the terraform provider"
- cd stackit || exit 1
- TF_ACC=1 \
- TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
- TF_ACC_REGION=${TF_ACC_REGION} \
- TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
- TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
- TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
- TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
- TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
- TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
- go test ./... -count=1 -timeout=${{ inputs.test_timeout_string }}
- echo "::endgroup::"
- env:
- TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
- TF_ACC_REGION: ${{ inputs.region }}
- TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
- TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
- TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
- TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
- TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
diff --git a/.github/actions/build/action.yaml b/.github/actions/build/action.yaml
index 7bea976a..fe544618 100644
--- a/.github/actions/build/action.yaml
+++ b/.github/actions/build/action.yaml
@@ -1,3 +1,4 @@
+
name: Build
description: "Build pipeline"
inputs:
@@ -20,63 +21,25 @@ runs:
run: |
set -e
apt-get -y -qq update
- apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
+ apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- - name: Checkout
- uses: actions/checkout@v6
- name: Install Go ${{ inputs.go-version }}
uses: actions/setup-go@v6
with:
- # go-version: ${{ inputs.go-version }}
+ go-version: ${{ inputs.go-version }}
check-latest: true
go-version-file: 'go.mod'
- - name: Determine GOMODCACHE
- shell: bash
- id: goenv
- run: |
- set -e
- # echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
- echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
-
- - name: Restore cached GO pkg
- id: cache-gopkg
- uses: actions/cache/restore@v5
- with:
- path: "${{ steps.goenv.outputs.gomodcache }}"
- key: ${{ runner.os }}-gopkg
-
- name: Install go tools
- if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash
run: |
set -e
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
+ go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
-# - name: Run build pkg directory
-# shell: bash
-# run: |
-# set -e
-# go run generator/main.go build
-
- - name: Get all go packages
- if: steps.cache-gopkg.outputs.cache-hit != 'true'
- shell: bash
- run: |
- set -e
- go get ./...
-
- - name: Save Cache
- id: cache-gopkg-save
- uses: actions/cache/save@v5
- with:
- path: |
- ${{ steps.goenv.outputs.gomodcache }}
- key: ${{ runner.os }}-gopkg
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
uses: actions/setup-java@v5
@@ -84,6 +47,16 @@ runs:
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
java-version: ${{ inputs.java-version }}
+ - name: Checkout
+ uses: actions/checkout@v6
+
+ - name: Run build pkg directory
+ shell: bash
+ run: |
+ set -e
+ go run cmd/main.go build
+
+
- name: Run make to build app
shell: bash
run: |
diff --git a/.github/actions/setup-cache-go/action.yaml b/.github/actions/setup-cache-go/action.yaml
deleted file mode 100644
index d352db76..00000000
--- a/.github/actions/setup-cache-go/action.yaml
+++ /dev/null
@@ -1,71 +0,0 @@
-name: 'Setup Go and cache dependencies'
-author: 'Forgejo authors, Marcel S. Henselin'
-description: |
- Wrap the setup-go with improved dependency caching.
-
-inputs:
- username:
- description: 'User for which to manage the dependency cache'
- default: root
-
- go-version:
- description: "go version to install"
- default: '1.25'
- required: true
-
-runs:
- using: "composite"
- steps:
- - name: "Install zstd for faster caching"
- shell: bash
- run: |
- apt-get update -qq
- apt-get -q install -qq -y zstd
-
- - name: "Set up Go using setup-go"
- uses: https://code.forgejo.org/actions/setup-go@v6
- id: go-version
- with:
- # go-version: ${{ inputs.go-version }}
- check-latest: true # Always check for the latest patch release
- go-version-file: "go.mod"
- # do not cache dependencies, we do this manually
- cache: false
-
- - name: "Get go environment information"
- shell: bash
- id: go-environment
- run: |
- chmod 755 $HOME # ensure ${RUN_AS_USER} has permission when go is located in $HOME
- export GOROOT="$(go env GOROOT)"
- echo "modcache=$(su ${RUN_AS_USER} -c '${GOROOT}/bin/go env GOMODCACHE')" >> "$GITHUB_OUTPUT"
- echo "cache=$(su ${RUN_AS_USER} -c '${GOROOT}/bin/go env GOCACHE')" >> "$GITHUB_OUTPUT"
- env:
- RUN_AS_USER: ${{ inputs.username }}
- GO_VERSION: ${{ steps.go-version.outputs.go-version }}
-
- - name: "Create cache folders with correct permissions (for non-root users)"
- shell: bash
- if: inputs.username != 'root'
- # when the cache is restored, only the permissions of the last part are restored
- # so assuming that /home/user exists and we are restoring /home/user/go/pkg/mod,
- # both folders will have the correct permissions, but
- # /home/user/go and /home/user/go/pkg might be owned by root
- run: |
- su ${RUN_AS_USER} -c 'mkdir -p "${MODCACHE_DIR}" "${CACHE_DIR}"'
- env:
- RUN_AS_USER: ${{ inputs.username }}
- MODCACHE_DIR: ${{ steps.go-environment.outputs.modcache }}
- CACHE_DIR: ${{ steps.go-environment.outputs.cache }}
-
- - name: "Restore Go dependencies from cache or mark for later caching"
- id: cache-deps
- uses: https://code.forgejo.org/actions/cache@v5
- with:
- key: setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-${{ steps.go-version.outputs.go_version }}-${{ hashFiles('go.sum', 'go.mod') }}
- restore-keys: |
- setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-${{ steps.go-version.outputs.go_version }}-
- setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-
- path: |
- ${{ steps.go-environment.outputs.modcache }}
- ${{ steps.go-environment.outputs.cache }}
diff --git a/.github/workflows/ci.yaml.bak b/.github/workflows/ci.yaml
similarity index 57%
rename from .github/workflows/ci.yaml.bak
rename to .github/workflows/ci.yaml
index 6a3a8eb0..fbc3f339 100644
--- a/.github/workflows/ci.yaml.bak
+++ b/.github/workflows/ci.yaml
@@ -6,11 +6,6 @@ on:
- alpha
- main
workflow_dispatch:
- schedule:
- # every sunday at 00:00
- # - cron: '0 0 * * 0'
- # every day at 00:00
- - cron: '0 0 * * *'
push:
branches:
- '!main'
@@ -22,39 +17,6 @@ env:
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
jobs:
- runner_test:
- name: "Test STACKIT runner"
- runs-on: stackit-docker
- steps:
- - name: Install needed tools
- run: |
- apt-get -y -qq update
- apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
-
- - name: Setup Go
- uses: actions/setup-go@v6
- with:
- go-version: ${{ env.GO_VERSION }}
-
- - name: Install go tools
- run: |
- go install golang.org/x/tools/cmd/goimports@latest
- go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
- go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
-
- - name: Setup JAVA
- uses: actions/setup-java@v5
- with:
- distribution: 'temurin' # See 'Supported distributions' for available options
- java-version: '21'
-
- - name: Checkout
- uses: actions/checkout@v6
-
- - name: Run build pkg directory
- run: |
- go run cmd/main.go build
-
publish_test:
name: "Test readiness for publishing provider"
needs: config
@@ -137,78 +99,20 @@ jobs:
--gpgPubKeyFile=public_key.pem \
--version=${VERSION}
- testing:
- name: CI run tests
- runs-on: ubuntu-latest
- needs: config
- env:
- TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
- TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
- TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
- TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- - name: Build
- uses: ./.github/actions/build
- with:
- go-version: ${{ env.GO_VERSION }}
-
- - name: Setup Terraform
- uses: hashicorp/setup-terraform@v2
- with:
- terraform_wrapper: false
-
- - name: Create service account json file
- if: ${{ github.event_name == 'pull_request' }}
- run: |
- echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/service_account.json
-
- - name: Run go mod tidy
- if: ${{ github.event_name == 'pull_request' }}
- run: go mod tidy
-
- - name: Testing
- run: make test
-
- - name: Acceptance Testing
- env:
- TF_ACC: "1"
- if: ${{ github.event_name == 'pull_request' }}
- run: make test-acceptance-tf
-
- - name: Check coverage threshold
- shell: bash
- run: |
- make coverage
- COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
- echo "Coverage: $COVERAGE%"
- if (( $(echo "$COVERAGE < 80" | bc -l) )); then
- echo "Coverage is below 80%"
- # exit 1
- fi
-
- - name: Archive code coverage results
- uses: actions/upload-artifact@v4
- with:
- name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
- path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
main:
- if: ${{ github.event_name != 'schedule' }}
- name: CI run build and linting
+ name: CI
runs-on: ubuntu-latest
needs: config
steps:
- name: Checkout
- uses: actions/checkout@v6
-
+ uses: actions/checkout@v4
+
- name: Build
uses: ./.github/actions/build
with:
go-version: ${{ env.GO_VERSION }}
-
+
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
@@ -226,45 +130,27 @@ jobs:
- name: golangci-lint
uses: golangci/golangci-lint-action@v9
with:
- version: v2.9
+ version: v2.7
args: --config=golang-ci.yaml --allow-parallel-runners --timeout=5m
- continue-on-error: true
- - name: Linting
+ - name: Lint
run: make lint
- continue-on-error: true
+
+ - name: Test
+ run: make test
- # - name: Testing
- # run: make test
- #
- # - name: Acceptance Testing
- # if: ${{ github.event_name == 'pull_request' }}
- # run: make test-acceptance-tf
- #
- # - name: Check coverage threshold
- # shell: bash
- # run: |
- # make coverage
- # COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
- # echo "Coverage: $COVERAGE%"
- # if (( $(echo "$COVERAGE < 80" | bc -l) )); then
- # echo "Coverage is below 80%"
- # # exit 1
- # fi
-
- # - name: Archive code coverage results
- # uses: actions/upload-artifact@v4
- # with:
- # name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
- # path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
+ - name: Archive code coverage results
+ uses: actions/upload-artifact@v4
+ with:
+ name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
+ path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
config:
- if: ${{ github.event_name != 'schedule' }}
name: Check GoReleaser config
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v6
+ uses: actions/checkout@v4
- name: Check GoReleaser
uses: goreleaser/goreleaser-action@v6
diff --git a/.github/workflows/ci_new.yaml b/.github/workflows/ci_new.yaml
deleted file mode 100644
index 35deb76c..00000000
--- a/.github/workflows/ci_new.yaml
+++ /dev/null
@@ -1,343 +0,0 @@
-name: CI Workflow
-
-on:
- pull_request:
- branches:
- - alpha
- - main
- workflow_dispatch:
- schedule:
- # every sunday at 00:00
- # - cron: '0 0 * * 0'
- # every day at 00:00
- - cron: '0 0 * * *'
- push:
- branches:
- - '!main'
- - '!alpha'
- paths:
- - '!.github'
-
-env:
- GO_VERSION: "1.25"
- CODE_COVERAGE_FILE_NAME: "coverage.out" # must be the same as in Makefile
- CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
-
-jobs:
- config:
- if: ${{ github.event_name != 'schedule' }}
- name: Check GoReleaser config
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- - name: Check GoReleaser
- uses: goreleaser/goreleaser-action@v7
- with:
- args: check
-
- prepare:
- name: Prepare GO cache
- runs-on: ubuntu-latest
- permissions:
- actions: read # Required to identify workflow run.
- checks: write # Required to add status summary.
- contents: read # Required to checkout repository.
- pull-requests: write # Required to add PR comment.
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- - name: Install Go ${{ inputs.go-version }}
- id: go-install
- uses: actions/setup-go@v6
- with:
- # go-version: ${{ inputs.go-version }}
- check-latest: true
- go-version-file: 'go.mod'
-
- - name: Determine GOMODCACHE
- shell: bash
- id: goenv
- run: |
- set -e
- # echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
- echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
-
- - name: Restore cached GO pkg
- id: cache-gopkg
- uses: actions/cache/restore@v5
- with:
- path: "${{ steps.goenv.outputs.gomodcache }}"
- key: ${{ runner.os }}-gopkg
-
- - name: Install go tools
- if: steps.cache-gopkg.outputs.cache-hit != 'true'
- run: |
- go install golang.org/x/tools/cmd/goimports@latest
- go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
- go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
-
- - name: Get all go packages
- if: steps.cache-gopkg.outputs.cache-hit != 'true'
- shell: bash
- run: |
- set -e
- go get ./...
-
- - name: Save Cache
- if: steps.cache-gopkg.outputs.cache-hit != 'true'
- id: cache-gopkg-save
- uses: actions/cache/save@v5
- with:
- path: |
- ${{ steps.goenv.outputs.gomodcache }}
- key: ${{ runner.os }}-gopkg
-
-
- publish_test:
- name: "Test readiness for publishing provider"
- needs:
- - config
- - prepare
- runs-on: ubuntu-latest
- permissions:
- actions: read # Required to identify workflow run.
- checks: write # Required to add status summary.
- contents: read # Required to checkout repository.
- pull-requests: write # Required to add PR comment.
- steps:
- - name: Install needed tools
- run: |
- apt-get -y -qq update
- apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
-
- - name: Checkout
- uses: actions/checkout@v6
-
- - name: Setup Go
- uses: actions/setup-go@v6
- with:
- # go-version: ${{ env.GO_VERSION }}
- check-latest: true
- go-version-file: 'go.mod'
-
- - name: Install go tools
- run: |
- go install golang.org/x/tools/cmd/goimports@latest
- go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
- go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
-
- - name: Setup JAVA
- uses: actions/setup-java@v5
- with:
- distribution: 'temurin' # See 'Supported distributions' for available options
- java-version: '21'
-
-# - name: Run build pkg directory
-# run: |
-# go run generator/main.go build
-
- - name: Set up s3cfg
- run: |
- cat <<'EOF' >> ~/.s3cfg
- [default]
- host_base = https://object.storage.eu01.onstackit.cloud
- host_bucket = https://%(bucket).object.storage.eu01.onstackit.cloud
- check_ssl_certificate = False
- access_key = ${{ secrets.S3_ACCESS_KEY }}
- secret_key = ${{ secrets.S3_SECRET_KEY }}
- EOF
-
- - name: Import GPG key
- run: |
- echo "${{ secrets.PRIVATE_KEY_PEM }}" > ~/private.key.pem
- gpg --import ~/private.key.pem
- rm ~/private.key.pem
-
- - name: Run GoReleaser with SNAPSHOT
- id: goreleaser
- env:
- GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
- GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
- uses: goreleaser/goreleaser-action@v7
- with:
- args: release --skip publish --clean --snapshot
-
- - name: Prepare key file
- run: |
- echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
-
- - name: Prepare provider directory structure
- run: |
- VERSION=$(jq -r .version < dist/metadata.json)
- go run generator/main.go \
- publish \
- --namespace=mhenselin \
- --providerName=stackitprivatepreview \
- --repoName=terraform-provider-stackitprivatepreview \
- --domain=tfregistry.sysops.stackit.rocks \
- --gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
- --gpgPubKeyFile=public_key.pem \
- --version=${VERSION}
-
- testing:
- name: CI run tests
- runs-on: ubuntu-latest
- needs:
- - config
- - prepare
- env:
- TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
- TF_ACC_ORGANIZATION_ID: ${{ vars.TF_ACC_ORGANIZATION_ID }}
- TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
- TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
- TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- - name: Build
- uses: ./.github/actions/build
- with:
- go-version: ${{ env.GO_VERSION }}
-
- - name: Setup Terraform
- uses: hashicorp/setup-terraform@v2
- with:
- terraform_wrapper: false
-
- - name: Create service account json file
- if: ${{ github.event_name == 'pull_request' }}
- run: |
- echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/.service_account.json
-
- - name: Run go mod tidy
- if: ${{ github.event_name == 'pull_request' }}
- run: go mod tidy
-
- - name: Testing
- run: |
- TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
- export TF_ACC_SERVICE_ACCOUNT_FILE
- make test
-
-# - name: Acceptance Testing
-# env:
-# TF_ACC: "1"
-# if: ${{ github.event_name == 'pull_request' }}
-# run: |
-# TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
-# export TF_ACC_SERVICE_ACCOUNT_FILE
-# make test-acceptance-tf
-
- - name: Run Test
- if: ${{ github.event_name == 'pull_request' }}
- uses: ./.github/actions/acc_test
- with:
- go-version: ${{ env.GO_VERSION }}
- project_id: ${{ vars.TF_ACC_PROJECT_ID }}
- region: ${{ vars.TF_ACC_REGION }}
- service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
- project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
- tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
- tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
- tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
- tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
- # service_account_json_file_path: "~/service_account.json"
-
- - name: Check coverage threshold
- shell: bash
- run: |
- make coverage
- COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
- echo "Coverage: $COVERAGE%"
- if (( $(echo "$COVERAGE < 80" | bc -l) )); then
- echo "Coverage is below 80%"
- # exit 1
- fi
-
- - name: Archive code coverage results
- uses: actions/upload-artifact@v4
- with:
- name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
- path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
-
- main:
- if: ${{ github.event_name != 'schedule' }}
- name: CI run build and linting
- runs-on: ubuntu-latest
- needs:
- - config
- - prepare
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
-# - uses: actions/cache@v5
-# id: cache
-# with:
-# path: path/to/dependencies
-# key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
-
-# - name: Install Dependencies
-# if: steps.cache.outputs.cache-hit != 'true'
-# run: /install.sh
-
- - name: Build
- uses: ./.github/actions/build
- with:
- go-version: ${{ env.GO_VERSION }}
-
- - name: Setup Terraform
- uses: hashicorp/setup-terraform@v2
- with:
- terraform_wrapper: false
-
- - name: "Ensure docs are up-to-date"
- if: ${{ github.event_name == 'pull_request' }}
- run: ./scripts/check-docs.sh
- continue-on-error: true
-
- - name: "Run go mod tidy"
- if: ${{ github.event_name == 'pull_request' }}
- run: go mod tidy
-
- - name: golangci-lint
- uses: golangci/golangci-lint-action@v9
- with:
- version: v2.10
- args: --config=.golang-ci.yaml --allow-parallel-runners --timeout=5m
- continue-on-error: true
-
- - name: Linting terraform files
- run: make lint-tf
- continue-on-error: true
-
- code_coverage:
- name: "Code coverage report"
- if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
- runs-on: ubuntu-latest
- needs:
- - main
- - prepare
- permissions:
- contents: read
- actions: read # to download code coverage results from "main" job
- pull-requests: write # write permission needed to comment on PR
- steps:
- - name: Install needed tools
- shell: bash
- run: |
- set -e
- apt-get -y -qq update
- apt-get -y -qq install sudo
-
- - name: Check new code coverage
- uses: fgrosse/go-coverage-report@v1.2.0
- continue-on-error: true # Add this line to prevent pipeline failures in forks
- with:
- coverage-artifact-name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
- coverage-file-name: ${{ env.CODE_COVERAGE_FILE_NAME }}
- root-package: 'github.com/stackitcloud/terraform-provider-stackit'
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
index e6245e52..fba2a7d9 100644
--- a/.github/workflows/publish.yaml
+++ b/.github/workflows/publish.yaml
@@ -23,7 +23,7 @@ jobs:
uses: actions/checkout@v6
- name: Check GoReleaser
- uses: goreleaser/goreleaser-action@v7
+ uses: goreleaser/goreleaser-action@v6
with:
args: check
@@ -43,15 +43,10 @@ jobs:
apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- - name: Checkout
- uses: actions/checkout@v6
-
- name: Setup Go
uses: actions/setup-go@v6
with:
- # go-version: ${{ env.GO_VERSION }}
- check-latest: true
- go-version-file: 'go.mod'
+ go-version: ${{ env.GO_VERSION }}
- name: Install go tools
run: |
@@ -65,6 +60,13 @@ jobs:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21'
+ - name: Checkout
+ uses: actions/checkout@v6
+
+ - name: Run build pkg directory
+ run: |
+ go run cmd/main.go build
+
- name: Set up s3cfg
run: |
cat <<'EOF' >> ~/.s3cfg
@@ -88,7 +90,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
- uses: goreleaser/goreleaser-action@v7
+ uses: goreleaser/goreleaser-action@v6
with:
args: release --skip publish --clean --snapshot
@@ -98,7 +100,7 @@ jobs:
env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
- uses: goreleaser/goreleaser-action@v7
+ uses: goreleaser/goreleaser-action@v6
with:
args: release --skip publish --clean
@@ -109,7 +111,7 @@ jobs:
- name: Prepare provider directory structure
run: |
VERSION=$(jq -r .version < dist/metadata.json)
- go run generator/main.go \
+ go run cmd/main.go \
publish \
--namespace=mhenselin \
--providerName=stackitprivatepreview \
@@ -125,16 +127,3 @@ jobs:
cd release/
s3cmd put --recursive v1 s3://terraform-provider-privatepreview/
s3cmd put --recursive .well-known s3://terraform-provider-privatepreview/
-
- - name: Import SSH key
- run: |
- mkdir -p ~/.ssh
- echo "${{ secrets.DOCS_UPLOAD_SSH_KEY }}" > ~/.ssh/id_ed25519
- chmod 0600 ~/.ssh/id_ed25519
-
- - name: Upload docs via scp
- run: |
- set -e
- ssh -o StrictHostKeyChecking=no ubuntu@${{ vars.DOCS_SERVER_IP }} 'rm -rf /srv/www/docs'
- echo "${{ github.ref_name }}" >docs/_version.txt
- scp -o StrictHostKeyChecking=no -r docs ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 79547c9a..254c40f2 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -18,23 +18,21 @@ jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v6
+ - uses: actions/checkout@v4
with:
# Allow goreleaser to access older tag information.
fetch-depth: 0
-
- - uses: https://code.forgejo.org/actions/setup-go@v6
+ - uses: actions/setup-go@v5
with:
go-version-file: "go.mod"
cache: true
-
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
id: import_gpg
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
- name: Run GoReleaser
- uses: goreleaser/goreleaser-action@v7
+ uses: goreleaser/goreleaser-action@v6
with:
args: release --clean
env:
diff --git a/.github/workflows/renovate.yaml b/.github/workflows/renovate.yaml
index 90adebe6..12454b9f 100644
--- a/.github/workflows/renovate.yaml
+++ b/.github/workflows/renovate.yaml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v6
+ uses: actions/checkout@v4
- name: Self-hosted Renovate
uses: renovatebot/github-action@v41.0.0
with:
diff --git a/.github/workflows/runnerstats.yaml b/.github/workflows/runnerstats.yaml
deleted file mode 100644
index 08190d4c..00000000
--- a/.github/workflows/runnerstats.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: Runner stats
-
-on:
- workflow_dispatch:
-
-jobs:
- stats-own:
- name: "Get own runner stats"
- runs-on: ubuntu-latest
- steps:
- - name: Install needed tools
- run: |
- apt-get -y -qq update
- apt-get -y -qq install inxi
-
- - name: Show stats
- run: inxi -c 0
-
- stats-stackit:
- name: "Get STACKIT runner stats"
- runs-on: stackit-docker
- steps:
- - name: Install needed tools
- run: |
- apt-get -y -qq update
- apt-get -y -qq install inxi
-
- - name: Show stats
- run: inxi -c 0
diff --git a/.github/workflows/tf-acc-test.yaml b/.github/workflows/tf-acc-test.yaml
index b409df26..a8e6a53f 100644
--- a/.github/workflows/tf-acc-test.yaml
+++ b/.github/workflows/tf-acc-test.yaml
@@ -7,23 +7,21 @@ on:
workflow_dispatch:
jobs:
- acc_test:
+ main:
name: Acceptance Tests
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v6
-
- - name: Run Test
- uses: ./.github/actions/acc_test
- with:
- go-version: ${{ env.GO_VERSION }}
- project_id: ${{ vars.TF_ACC_PROJECT_ID }}
- region: 'eu01'
- service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
- project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
- tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
- tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
- tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
- tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
- # service_account_json_file_path: "~/service_account.json"
+ uses: actions/checkout@v4
+ - name: Install project tools and dependencies
+ run: make project-tools
+ - name: Run tests
+ run: |
+ make test-acceptance-tf TF_ACC_PROJECT_ID=$${{ secrets.TF_ACC_PROJECT_ID }} TF_ACC_ORGANIZATION_ID=$${{ secrets.TF_ACC_ORGANIZATION_ID }} TF_ACC_REGION="eu01"
+ env:
+ STACKIT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_TOKEN }}
+ TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
+ TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
+ TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
+ TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
+ TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
diff --git a/.gitignore b/.gitignore
index 0b45cb57..8b2a63bb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -40,12 +40,8 @@ coverage.out
coverage.html
generated
stackit-sdk-generator
-stackit-sdk-generator/**
dist
.secrets
pkg_gen
-/release/
-.env
-**/.env
diff --git a/.goreleaser.yaml b/.goreleaser.yaml
index e0aafe37..3e9105ca 100644
--- a/.goreleaser.yaml
+++ b/.goreleaser.yaml
@@ -19,20 +19,20 @@ builds:
ldflags:
- '-s -w -X main.version={{.Version}} -X main.commit={{.Commit}}'
goos:
- - freebsd
- - windows
+# - freebsd
+# - windows
- linux
- darwin
goarch:
- amd64
- - '386'
- - arm
+# - '386'
+# - arm
- arm64
- ignore:
- - goos: darwin
- goarch: '386'
- - goos: windows
- goarch: arm
+# ignore:
+# - goos: darwin
+# goarch: '386'
+# - goos: windows
+# goarch: arm
binary: '{{ .ProjectName }}_v{{ .Version }}'
archives:
- formats: [ 'zip' ]
diff --git a/Makefile b/Makefile
index 8b74e830..c6b3f9ac 100644
--- a/Makefile
+++ b/Makefile
@@ -12,20 +12,17 @@ project-tools:
# LINT
lint-golangci-lint:
@echo "Linting with golangci-lint"
- @go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config .golang-ci.yaml
+ @$(SCRIPTS_BASE)/lint-golangci-lint.sh
-
-lint-tf:
+lint-tf:
@echo "Linting terraform files"
- @terraform fmt -check -diff -recursive examples/
- @terraform fmt -check -diff -recursive stackit/
+ @terraform fmt -check -diff -recursive
lint: lint-golangci-lint lint-tf
# DOCUMENTATION GENERATION
generate-docs:
@echo "Generating documentation with tfplugindocs"
-
@$(SCRIPTS_BASE)/tfplugindocs.sh
build:
@@ -37,16 +34,15 @@ fmt:
@terraform fmt -diff -recursive
# TEST
-.PHONY: test coverage
test:
@echo "Running tests for the terraform provider"
- @cd $(ROOT_DIR)/stackit && go test -timeout 0 ./... -count=1 -coverprofile=../coverage.out && cd $(ROOT_DIR)
+ @cd $(ROOT_DIR)/stackit && go test ./... -count=1 -coverprofile=coverage.out && cd $(ROOT_DIR)
# Test coverage
coverage:
@echo ">> Creating test coverage report for the terraform provider"
- @cd $(ROOT_DIR)/stackit && (go test -timeout 0 ./... -count=1 -coverprofile=../coverage.out || true) && cd $(ROOT_DIR)
- @cd $(ROOT_DIR)/stackit && go tool cover -html=../coverage.out -o ../coverage.html && cd $(ROOT_DIR)
+ @cd $(ROOT_DIR)/stackit && (go test ./... -count=1 -coverprofile=coverage.out || true) && cd $(ROOT_DIR)
+ @cd $(ROOT_DIR)/stackit && go tool cover -html=coverage.out -o coverage.html && cd $(ROOT_DIR)
test-acceptance-tf:
@if [ -z $(TF_ACC_PROJECT_ID) ]; then echo "Input TF_ACC_PROJECT_ID missing"; exit 1; fi
diff --git a/README.md b/README.md
index ab79f28e..1da34359 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,15 @@
+

+
+
-# STACKIT Terraform Provider
(PRIVATE PREVIEW)
+# STACKIT Terraform Provider
-[](https://registry.terraform.io/providers/stackitcloud/stackit/latest)  [](https://www.apache.org/licenses/LICENSE-2.0)
+[](https://goreportcard.com/report/github.com/stackitcloud/terraform-provider-stackit) [](https://registry.terraform.io/providers/stackitcloud/stackit/latest)  [](https://www.apache.org/licenses/LICENSE-2.0)
-This project is the **NOT** official [Terraform Provider](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs) for [STACKIT](https://www.stackit.de/en/)!
-
-This a **private preview only**, which allows you to manage STACKIT resources through Terraform.
+This project is the official [Terraform Provider](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs) for [STACKIT](https://www.stackit.de/en/), which allows you to manage STACKIT resources through Terraform.
## Getting Started
@@ -17,22 +18,20 @@ To install the [STACKIT Terraform Provider](https://registry.terraform.io/provid
```hcl
terraform {
required_providers {
- stackitprivatepreview = {
- source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
- version = "= 0.0.5-alpha"
+ stackit = {
+ source = "stackitcloud/stackit"
+ version = "X.X.X"
}
}
}
-provider "stackitprivatepreview" {
+provider "stackit" {
# Configuration options
}
```
Check one of the examples in the [examples](examples/) folder.
-TODO: revise the following sections
-
## Authentication
To authenticate, you will need a [service account](https://docs.stackit.cloud/platform/access-and-identity/service-accounts/). Create it in the [STACKIT Portal](https://portal.stackit.cloud/) and assign the necessary permissions to it, e.g. `project.owner`. There are multiple ways to authenticate:
diff --git a/cmd/cmd/build/build.go b/cmd/cmd/build/build.go
new file mode 100644
index 00000000..81ea75a0
--- /dev/null
+++ b/cmd/cmd/build/build.go
@@ -0,0 +1,737 @@
+package build
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "log/slog"
+ "os"
+ "os/exec"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "text/template"
+
+ "github.com/ldez/go-git-cmd-wrapper/v2/clone"
+ "github.com/ldez/go-git-cmd-wrapper/v2/git"
+)
+
+const (
+ OAS_REPO_NAME = "stackit-api-specifications"
+ OAS_REPO = "https://github.com/stackitcloud/stackit-api-specifications.git"
+ GEN_REPO_NAME = "stackit-sdk-generator"
+ GEN_REPO = "https://github.com/stackitcloud/stackit-sdk-generator.git"
+)
+
+type version struct {
+ verString string
+ major int
+ minor int
+}
+
+func Build() error {
+ slog.Info("Starting Builder")
+ root, err := getRoot()
+ if err != nil {
+ log.Fatal(err)
+ }
+ if root == nil || *root == "" {
+ return fmt.Errorf("unable to determine root directory from git")
+ }
+ slog.Info("Using root directory", "dir", *root)
+
+ slog.Info("Cleaning up old generator directory")
+ err = os.RemoveAll(path.Join(*root, GEN_REPO_NAME))
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Cleaning up old packages directory")
+ err = os.RemoveAll(path.Join(*root, "pkg_gen"))
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
+ genDir, err := createGeneratorDir(*root, GEN_REPO, GEN_REPO_NAME)
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Creating oas dir", "dir", fmt.Sprintf("%s/%s", *root, OAS_REPO_NAME))
+ repoDir, err := createRepoDir(genDir, OAS_REPO, OAS_REPO_NAME)
+ if err != nil {
+ return fmt.Errorf("%s", err.Error())
+ }
+
+ slog.Info("Retrieving versions from subdirs")
+ // TODO - major
+ verMap, err := getVersions(repoDir)
+ if err != nil {
+ return fmt.Errorf("%s", err.Error())
+ }
+
+ slog.Info("Reducing to only latest or highest")
+ res, err := getOnlyLatest(verMap)
+ if err != nil {
+ return fmt.Errorf("%s", err.Error())
+ }
+
+ slog.Info("Creating OAS dir")
+ err = os.MkdirAll(path.Join(genDir, "oas"), 0755)
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Copying OAS files")
+ for service, item := range res {
+ baseService := strings.TrimSuffix(service, "alpha")
+ baseService = strings.TrimSuffix(baseService, "beta")
+ itemVersion := fmt.Sprintf("v%d%s", item.major, item.verString)
+ if item.minor != 0 {
+ itemVersion = itemVersion + "" + strconv.Itoa(item.minor)
+ }
+ srcFile := path.Join(
+ repoDir,
+ "services",
+ baseService,
+ itemVersion,
+ fmt.Sprintf("%s.json", baseService),
+ )
+ dstFile := path.Join(genDir, "oas", fmt.Sprintf("%s.json", service))
+ _, err = copyFile(srcFile, dstFile)
+ if err != nil {
+ return fmt.Errorf("%s", err.Error())
+ }
+ }
+
+ slog.Info("Cleaning up", "dir", repoDir)
+ err = os.RemoveAll(filepath.Dir(repoDir))
+ if err != nil {
+ return fmt.Errorf("%s", err.Error())
+ }
+
+ slog.Info("Changing dir", "dir", genDir)
+ err = os.Chdir(genDir)
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Calling make", "command", "generate-go-sdk")
+ cmd := exec.Command("make", "generate-go-sdk")
+ var stdOut, stdErr bytes.Buffer
+ cmd.Stdout = &stdOut
+ cmd.Stderr = &stdErr
+
+ if err = cmd.Start(); err != nil {
+ slog.Error("cmd.Start", "error", err)
+ return err
+ }
+
+ if err = cmd.Wait(); err != nil {
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) {
+ slog.Error("cmd.Wait", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return fmt.Errorf("%s", stdErr.String())
+ }
+ if err != nil {
+ slog.Error("cmd.Wait", "err", err)
+ return err
+ }
+ }
+
+ slog.Info("Cleaning up go.mod and go.sum files")
+ cleanDir := path.Join(genDir, "sdk-repo-updated", "services")
+ dirEntries, err := os.ReadDir(cleanDir)
+ if err != nil {
+ return err
+ }
+ for _, entry := range dirEntries {
+ if entry.IsDir() {
+ err = deleteFiles(
+ path.Join(cleanDir, entry.Name(), "go.mod"),
+ path.Join(cleanDir, entry.Name(), "go.sum"),
+ )
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ slog.Info("Changing dir", "dir", *root)
+ err = os.Chdir(*root)
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Rearranging package directories")
+ err = os.MkdirAll(path.Join(*root, "pkg_gen"), 0755) // noqa:gosec
+ if err != nil {
+ return err
+ }
+ srcDir := path.Join(genDir, "sdk-repo-updated", "services")
+ items, err := os.ReadDir(srcDir)
+ if err != nil {
+ return err
+ }
+ for _, item := range items {
+ if item.IsDir() {
+ slog.Info(" -> package", "name", item.Name())
+ tgtDir := path.Join(*root, "pkg_gen", item.Name())
+ // no backup needed as we generate new
+ //bakName := fmt.Sprintf("%s.%s", item.Name(), time.Now().Format("20060102-150405"))
+ //if _, err = os.Stat(tgtDir); !os.IsNotExist(err) {
+ // err = os.Rename(
+ // tgtDir,
+ // path.Join(*root, "pkg", bakName),
+ // )
+ // if err != nil {
+ // return err
+ // }
+ //}
+ err = os.Rename(path.Join(srcDir, item.Name()), tgtDir)
+ if err != nil {
+ return err
+ }
+
+ // wait is placed outside now
+ //if _, err = os.Stat(path.Join(*root, "pkg", bakName, "wait")); !os.IsNotExist(err) {
+ // slog.Info(" Copying wait subfolder")
+ // err = os.Rename(path.Join(*root, "pkg", bakName, "wait"), path.Join(tgtDir, "wait"))
+ // if err != nil {
+ // return err
+ // }
+ //}
+ }
+ }
+
+ slog.Info("Checking needed commands available")
+ err = checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"})
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Generating service boilerplate")
+ err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Copying all service files")
+ err = CopyDirectory(
+ path.Join(*root, "generated", "internal", "services"),
+ path.Join(*root, "stackit", "internal", "services"),
+ )
+ if err != nil {
+ return err
+ }
+
+ err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
+ if err != nil {
+ return err
+ }
+
+ slog.Info("Finally removing temporary files and directories")
+ //err = os.RemoveAll(path.Join(*root, "generated"))
+ //if err != nil {
+ // slog.Error("RemoveAll", "dir", path.Join(*root, "generated"), "err", err)
+ // return err
+ //}
+
+ err = os.RemoveAll(path.Join(*root, GEN_REPO_NAME))
+ if err != nil {
+ slog.Error("RemoveAll", "dir", path.Join(*root, GEN_REPO_NAME), "err", err)
+ return err
+ }
+
+ slog.Info("Done")
+ return nil
+}
+
+type templateData struct {
+ PackageName string
+ NameCamel string
+ NamePascal string
+ NameSnake string
+}
+
+func fileExists(path string) bool {
+ _, err := os.Stat(path)
+ if os.IsNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(err)
+ }
+ return true
+}
+
+func createBoilerplate(rootFolder, folder string) error {
+ services, err := os.ReadDir(folder)
+ if err != nil {
+ return err
+ }
+ for _, svc := range services {
+ if !svc.IsDir() {
+ continue
+ }
+ resources, err := os.ReadDir(path.Join(folder, svc.Name()))
+ if err != nil {
+ return err
+ }
+
+ var handleDS bool
+ var handleRes bool
+ var foundDS bool
+ var foundRes bool
+
+ for _, res := range resources {
+ if !res.IsDir() {
+ continue
+ }
+
+ resourceName := res.Name()
+
+ dsFile := path.Join(folder, svc.Name(), res.Name(), "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", res.Name()))
+ handleDS = fileExists(dsFile)
+
+ resFile := path.Join(folder, svc.Name(), res.Name(), "resources_gen", fmt.Sprintf("%s_resource_gen.go", res.Name()))
+ handleRes = fileExists(resFile)
+
+ dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
+ foundDS = fileExists(dsGoFile)
+
+ resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
+ foundRes = fileExists(resGoFile)
+
+ if handleDS && !foundDS {
+ slog.Info("Creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
+ if !ValidateSnakeCase(resourceName) {
+ return errors.New("resource name is invalid")
+ }
+
+ tplName := "data_source_scaffold.gotmpl"
+ err = writeTemplateToFile(
+ tplName,
+ path.Join(rootFolder, "tools", "templates", tplName),
+ path.Join(folder, svc.Name(), res.Name(), "datasource.go"),
+ &templateData{
+ PackageName: svc.Name(),
+ NameCamel: ToCamelCase(resourceName),
+ NamePascal: ToPascalCase(resourceName),
+ NameSnake: resourceName,
+ },
+ )
+ if err != nil {
+ panic(err)
+ }
+ }
+
+ if handleRes && !foundRes {
+ slog.Info("Creating missing resource.go", "service", svc.Name(), "resource", resourceName)
+ if !ValidateSnakeCase(resourceName) {
+ return errors.New("resource name is invalid")
+ }
+
+ tplName := "resource_scaffold.gotmpl"
+ err = writeTemplateToFile(
+ tplName,
+ path.Join(rootFolder, "tools", "templates", tplName),
+ path.Join(folder, svc.Name(), res.Name(), "resource.go"),
+ &templateData{
+ PackageName: svc.Name(),
+ NameCamel: ToCamelCase(resourceName),
+ NamePascal: ToPascalCase(resourceName),
+ NameSnake: resourceName,
+ },
+ )
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func ucfirst(s string) string {
+ if len(s) == 0 {
+ return ""
+ }
+ return strings.ToUpper(s[:1]) + s[1:]
+}
+
+func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
+ fn := template.FuncMap{
+ "ucfirst": ucfirst,
+ }
+
+ tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
+ if err != nil {
+ return err
+ }
+
+ var f *os.File
+ f, err = os.Create(outFile)
+ if err != nil {
+ return err
+ }
+
+ err = tmpl.Execute(f, *data)
+ if err != nil {
+ return err
+ }
+
+ err = f.Close()
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func generateServiceFiles(rootDir, generatorDir string) error {
+ // slog.Info("Generating specs folder")
+ err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755)
+ if err != nil {
+ return err
+ }
+
+ specs, err := os.ReadDir(path.Join(rootDir, "service_specs"))
+ if err != nil {
+ return err
+ }
+ for _, spec := range specs {
+ if spec.IsDir() {
+ continue
+ }
+ // slog.Info("Checking spec", "name", spec.Name())
+ r := regexp.MustCompile(`^([a-z-]+)_(.*)_config.yml$`)
+ matches := r.FindAllStringSubmatch(spec.Name(), -1)
+ if matches != nil {
+ fileName := matches[0][0]
+ service := matches[0][1]
+ resource := matches[0][2]
+ slog.Info(
+ "Found service spec",
+ "name",
+ spec.Name(),
+ "service",
+ service,
+ "resource",
+ resource,
+ )
+
+ for _, part := range []string{"alpha", "beta"} {
+ oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service, part))
+ if _, err = os.Stat(oasFile); !os.IsNotExist(err) {
+ slog.Info("found matching oas", "service", service, "version", part)
+ scName := fmt.Sprintf("%s%s", service, part)
+ scName = strings.ReplaceAll(scName, "-", "")
+ err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755)
+ if err != nil {
+ return err
+ }
+
+ // slog.Info("Generating openapi spec json")
+ specFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource))
+
+ var stdOut, stdErr bytes.Buffer
+
+ // noqa:gosec
+ cmd := exec.Command(
+ "tfplugingen-openapi",
+ "generate",
+ "--config",
+ path.Join(rootDir, "service_specs", fileName),
+ "--output",
+ specFile,
+ oasFile,
+ )
+ cmd.Stdout = &stdOut
+ cmd.Stderr = &stdErr
+
+ if err = cmd.Start(); err != nil {
+ slog.Error("tfplugingen-openapi generate", "error", err)
+ return err
+ }
+
+ if err = cmd.Wait(); err != nil {
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) {
+ slog.Error("tfplugingen-openapi generate", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return fmt.Errorf("%s", stdErr.String())
+ }
+ if err != nil {
+ slog.Error("tfplugingen-openapi generate", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return err
+ }
+ }
+
+ // slog.Info("Creating terraform service resource files folder")
+ tgtFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen")
+ err = os.MkdirAll(tgtFolder, 0755)
+ if err != nil {
+ return err
+ }
+
+ // slog.Info("Generating terraform service resource files")
+
+ // noqa:gosec
+ cmd2 := exec.Command(
+ "tfplugingen-framework",
+ "generate",
+ "resources",
+ "--input",
+ specFile,
+ "--output",
+ tgtFolder,
+ "--package",
+ scName,
+ )
+
+ cmd2.Stdout = &stdOut
+ cmd2.Stderr = &stdErr
+ if err = cmd2.Start(); err != nil {
+ slog.Error("tfplugingen-framework generate resources", "error", err)
+ return err
+ }
+
+ if err = cmd2.Wait(); err != nil {
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) {
+ slog.Error("tfplugingen-framework generate resources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return fmt.Errorf("%s", stdErr.String())
+ }
+ if err != nil {
+ slog.Error("tfplugingen-framework generate resources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return err
+ }
+ }
+
+ // slog.Info("Creating terraform service datasource files folder")
+ tgtFolder = path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen")
+ err = os.MkdirAll(tgtFolder, 0755)
+ if err != nil {
+ return err
+ }
+
+ // slog.Info("Generating terraform service resource files")
+
+ // noqa:gosec
+ cmd3 := exec.Command(
+ "tfplugingen-framework",
+ "generate",
+ "data-sources",
+ "--input",
+ specFile,
+ "--output",
+ tgtFolder,
+ "--package",
+ scName,
+ )
+ var stdOut3, stdErr3 bytes.Buffer
+ cmd3.Stdout = &stdOut3
+ cmd3.Stderr = &stdErr3
+
+ if err = cmd3.Start(); err != nil {
+ slog.Error("tfplugingen-framework generate data-sources", "error", err)
+ return err
+ }
+
+ if err = cmd3.Wait(); err != nil {
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) {
+ slog.Error("tfplugingen-framework generate data-sources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return fmt.Errorf("%s", stdErr.String())
+ }
+ if err != nil {
+ slog.Error("tfplugingen-framework generate data-sources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
+ return err
+ }
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func checkCommands(commands []string) error {
+ for _, commandName := range commands {
+ if !commandExists(commandName) {
+ return fmt.Errorf("missing command %s", commandName)
+ }
+ slog.Info("found", "command", commandName)
+ }
+ return nil
+}
+
+func commandExists(cmd string) bool {
+ _, err := exec.LookPath(cmd)
+ return err == nil
+}
+
+func deleteFiles(fNames ...string) error {
+ for _, fName := range fNames {
+ if _, err := os.Stat(fName); !os.IsNotExist(err) {
+ err = os.Remove(fName)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+func copyFile(src, dst string) (int64, error) {
+ sourceFileStat, err := os.Stat(src)
+ if err != nil {
+ return 0, err
+ }
+
+ if !sourceFileStat.Mode().IsRegular() {
+ return 0, fmt.Errorf("%s is not a regular file", src)
+ }
+
+ source, err := os.Open(src)
+ if err != nil {
+ return 0, err
+ }
+ defer source.Close()
+
+ destination, err := os.Create(dst)
+ if err != nil {
+ return 0, err
+ }
+ defer destination.Close()
+ nBytes, err := io.Copy(destination, source)
+ return nBytes, err
+}
+
+func getOnlyLatest(m map[string]version) (map[string]version, error) {
+ tmpMap := make(map[string]version)
+ for k, v := range m {
+ item, ok := tmpMap[k]
+ if !ok {
+ tmpMap[k] = v
+ } else {
+ if item.major == v.major && item.minor < v.minor {
+ tmpMap[k] = v
+ }
+ }
+ }
+ return tmpMap, nil
+}
+
+func getVersions(dir string) (map[string]version, error) {
+ res := make(map[string]version)
+ children, err := os.ReadDir(path.Join(dir, "services"))
+ if err != nil {
+ return nil, err
+ }
+
+ for _, entry := range children {
+ if entry.IsDir() {
+ versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
+ if err != nil {
+ return nil, err
+ }
+ m, err2 := extractVersions(entry.Name(), versions)
+ if err2 != nil {
+ return m, err2
+ }
+ for k, v := range m {
+ res[k] = v
+ }
+ }
+ }
+ return res, nil
+}
+
+func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
+ res := make(map[string]version)
+ for _, vDir := range versionDirs {
+ if vDir.IsDir() {
+ r := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`)
+ matches := r.FindAllStringSubmatch(vDir.Name(), -1)
+ if matches == nil {
+ continue
+ }
+ svc, ver, err := handleVersion(service, matches[0])
+ if err != nil {
+ return nil, err
+ }
+
+ if svc != nil && ver != nil {
+ res[*svc] = *ver
+ }
+ }
+ }
+ return res, nil
+}
+
+func handleVersion(service string, match []string) (*string, *version, error) {
+ if match == nil {
+ fmt.Println("no matches")
+ return nil, nil, nil
+ }
+ verString := match[2]
+ if verString != "alpha" && verString != "beta" {
+ return nil, nil, errors.New("unsupported version")
+ }
+ majVer, err := strconv.Atoi(match[1])
+ if err != nil {
+ return nil, nil, err
+ }
+ if match[3] == "" {
+ match[3] = "0"
+ }
+ minVer, err := strconv.Atoi(match[3])
+ if err != nil {
+ return nil, nil, err
+ }
+ resStr := fmt.Sprintf("%s%s", service, verString)
+ return &resStr, &version{verString: verString, major: majVer, minor: minVer}, nil
+}
+
+func createRepoDir(root, repoUrl, repoName string) (string, error) {
+ oasTmpDir, err := os.MkdirTemp(root, "oas-tmp")
+ if err != nil {
+ return "", err
+ }
+ targetDir := path.Join(oasTmpDir, repoName)
+ _, err = git.Clone(
+ clone.Repository(repoUrl),
+ clone.Directory(targetDir),
+ )
+ if err != nil {
+ return "", err
+ }
+ return targetDir, nil
+}
+
+func createGeneratorDir(root, repoUrl, repoName string) (string, error) {
+ targetDir := path.Join(root, repoName)
+ _, err := git.Clone(
+ clone.Repository(repoUrl),
+ clone.Directory(targetDir),
+ )
+ if err != nil {
+ return "", err
+ }
+ return targetDir, nil
+}
+
+func getRoot() (*string, error) {
+ cmd := exec.Command("git", "rev-parse", "--show-toplevel")
+ out, err := cmd.Output()
+ if err != nil {
+ return nil, err
+ }
+ lines := strings.Split(string(out), "\n")
+ return &lines[0], nil
+}
diff --git a/generator/cmd/build/copy.go b/cmd/cmd/build/copy.go
similarity index 88%
rename from generator/cmd/build/copy.go
rename to cmd/cmd/build/copy.go
index e1243c05..ec0affe9 100644
--- a/generator/cmd/build/copy.go
+++ b/cmd/cmd/build/copy.go
@@ -3,7 +3,6 @@ package build
import (
"fmt"
"io"
- "log/slog"
"os"
"path/filepath"
"syscall"
@@ -75,24 +74,14 @@ func Copy(srcFile, dstFile string) error {
return err
}
- defer func(out *os.File) {
- err := out.Close()
- if err != nil {
- slog.Error("failed to close file", slog.Any("err", err))
- }
- }(out)
+ defer out.Close()
in, err := os.Open(srcFile)
if err != nil {
return err
}
- defer func(in *os.File) {
- err := in.Close()
- if err != nil {
- slog.Error("error closing destination file", slog.Any("err", err))
- }
- }(in)
+ defer in.Close()
_, err = io.Copy(out, in)
if err != nil {
diff --git a/generator/cmd/build/formats.go b/cmd/cmd/build/formats.go
similarity index 100%
rename from generator/cmd/build/formats.go
rename to cmd/cmd/build/formats.go
diff --git a/cmd/cmd/build/templates/data_source_scaffold.gotmpl b/cmd/cmd/build/templates/data_source_scaffold.gotmpl
new file mode 100644
index 00000000..d13021c7
--- /dev/null
+++ b/cmd/cmd/build/templates/data_source_scaffold.gotmpl
@@ -0,0 +1,51 @@
+package {{.PackageName}}
+
+import (
+ "context"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg/{{.PackageName}}"
+
+ {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/datasources_gen"
+)
+
+var _ datasource.DataSource = (*{{.NameCamel}}DataSource)(nil)
+
+func New{{.NamePascal}}DataSource() datasource.DataSource {
+ return &{{.NameCamel}}DataSource{}
+}
+
+type {{.NameCamel}}DataSource struct{
+ client *{{.PackageName}}.APIClient
+ providerData core.ProviderData
+}
+
+func (d *{{.NameCamel}}DataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}"
+}
+
+func (d *{{.NameCamel}}DataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}DataSourceSchema(ctx)
+}
+
+func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data {{.PackageName}}Gen.{{.NameCamel}}Model
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Todo: Read API call logic
+
+ // Example data value setting
+ // data.Id = types.StringValue("example-id")
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/generator/cmd/build/templates/provider_scaffold.gotmpl b/cmd/cmd/build/templates/provider_scaffold.gotmpl
similarity index 100%
rename from generator/cmd/build/templates/provider_scaffold.gotmpl
rename to cmd/cmd/build/templates/provider_scaffold.gotmpl
diff --git a/cmd/cmd/build/templates/resource_scaffold.gotmpl b/cmd/cmd/build/templates/resource_scaffold.gotmpl
new file mode 100644
index 00000000..cdd38853
--- /dev/null
+++ b/cmd/cmd/build/templates/resource_scaffold.gotmpl
@@ -0,0 +1,208 @@
+package {{.PackageName}}
+
+import (
+ "context"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+
+ {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/resources_gen"
+)
+
+var (
+ _ resource.Resource = &{{.NameCamel}}Resource{}
+ _ resource.ResourceWithConfigure = &{{.NameCamel}}Resource{}
+ _ resource.ResourceWithImportState = &{{.NameCamel}}Resource{}
+ _ resource.ResourceWithModifyPlan = &{{.NameCamel}}Resource{}
+)
+
+func New{{.NamePascal}}Resource() resource.Resource {
+ return &{{.NameCamel}}Resource{}
+}
+
+type {{.NameCamel}}Resource struct{
+ client *{{.PackageName}}.APIClient
+ providerData core.ProviderData
+}
+
+func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}"
+}
+
+func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}ResourceSchema(ctx)
+}
+
+// Configure adds the provider configured client to the resource.
+func (r *{{.NameCamel}}Resource) Configure(
+ ctx context.Context,
+ req resource.ConfigureRequest,
+ resp *resource.ConfigureResponse,
+) {
+ var ok bool
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClientConfigOptions := []config.ConfigurationOption{
+ config.WithCustomAuth(r.providerData.RoundTripper),
+ utils.UserAgentConfigOption(r.providerData.Version),
+ }
+ if r.providerData.PostgresFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint))
+ } else {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
+ }
+ apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...)
+ if err != nil {
+ resp.Diagnostics.AddError( "Error configuring API client", fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err))
+ return
+ }
+ r.client = apiClient
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} client configured")
+}
+
+func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data {{.PackageName}}Gen.{{.NamePascal}}Model
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // TODO: Create API call logic
+
+ // Example data value setting
+ data.{{.NameCamel | ucfirst}}Id = types.StringValue("id-from-response")
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} created")
+}
+
+func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data {{.PackageName}}Gen.{{.NamePascal}}Model
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+	// TODO: Read API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} read")
+}
+
+func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data {{.PackageName}}Gen.{{.NamePascal}}Model
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+	// TODO: Update API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} updated")
+}
+
+func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data {{.PackageName}}Gen.{{.NamePascal}}Model
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+	// TODO: Delete API call logic
+
+ tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted")
+}
+
+// ModifyPlan implements resource.ResourceWithModifyPlan.
+// Use the modifier to set the effective region in the current plan.
+func (r *{{.NameCamel}}Resource) ModifyPlan(
+ ctx context.Context,
+ req resource.ModifyPlanRequest,
+ resp *resource.ModifyPlanResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var configModel {{.PackageName}}Gen.{{.NamePascal}}Model
+ // skip initial empty configuration to avoid follow-up errors
+ if req.Config.Raw.IsNull() {
+ return
+ }
+ resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ var planModel {{.PackageName}}Gen.{{.NamePascal}}Model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+}
+
+// ImportState imports a resource into the Terraform state on success.
+// The expected format of the resource import identifier is: [project_id],[region],...
+func (r *{{.NameCamel}}Resource) ImportState(
+ ctx context.Context,
+ req resource.ImportStateRequest,
+ resp *resource.ImportStateResponse,
+) {
+ idParts := strings.Split(req.ID, core.Separator)
+
+	// TODO: Import logic
+ if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],..., got %q",
+ req.ID,
+ ),
+ )
+ return
+ }
+
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ // ... more ...
+
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "{{.PackageName | ucfirst}} database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
+ )
+ tflog.Info(ctx, "{{.PackageName | ucfirst}} {{.NameCamel}} state imported")
+}
diff --git a/cmd/cmd/buildCmd.go b/cmd/cmd/buildCmd.go
new file mode 100644
index 00000000..683c3536
--- /dev/null
+++ b/cmd/cmd/buildCmd.go
@@ -0,0 +1,17 @@
+package cmd
+
+import (
+ "github.com/spf13/cobra"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/build"
+)
+
+func NewBuildCmd() *cobra.Command {
+ return &cobra.Command{
+ Use: "build",
+ Short: "Build the necessary boilerplate",
+ Long: `...`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return build.Build()
+ },
+ }
+}
diff --git a/generator/cmd/publish/architecture.go b/cmd/cmd/publish/architecture.go
similarity index 68%
rename from generator/cmd/publish/architecture.go
rename to cmd/cmd/publish/architecture.go
index 7316a03d..a2e6f6af 100644
--- a/generator/cmd/publish/architecture.go
+++ b/cmd/cmd/publish/architecture.go
@@ -35,27 +35,36 @@ type GpgPublicKey struct {
}
func (p *Provider) CreateArchitectureFiles() error {
+ // var namespace, provider, distPath, repoName, version, gpgFingerprint, gpgPubKeyFile, domain string
+
log.Println("* Creating architecture files in target directories")
+ // filename = terraform-provider-[provider]_0.0.1_darwin_amd64.zip - provider_name + version + target + architecture + .zip
+ // prefix := fmt.Sprintf("v1/providers/%s/%s/%s/", namespace, provider, version)
prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version)
+ // pathPrefix := fmt.Sprintf("release/%s", prefix)
pathPrefix := path.Join("release", prefix)
+ // urlPrefix := fmt.Sprintf("https://%s/%s", domain, prefix)
urlPrefix, err := url.JoinPath("https://", p.Domain, prefix)
if err != nil {
return fmt.Errorf("error creating base url: %w", err)
}
+ // download url = https://example.com/v1/providers/namespace/provider/0.0.1/download/terraform-provider_0.0.1_darwin_amd64.zip
downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download")
if err != nil {
return fmt.Errorf("error crearting download url: %w", err)
}
downloadPathPrefix := path.Join(pathPrefix, "download")
+ // shasums url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS
shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version))
if err != nil {
return fmt.Errorf("error creating shasums url: %w", err)
}
+ // shasums_signature_url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS.sig
shasumsSigUrl := shasumsUrl + ".sig"
gpgAsciiPub, err := p.ReadGpgFile()
@@ -85,7 +94,7 @@ func (p *Provider) CreateArchitectureFiles() error {
archFileName := path.Join(downloadPathPrefix, target, arch)
a := Architecture{
- Protocols: []string{"5.1", "6.0"},
+ Protocols: []string{"5.1"},
OS: target,
Arch: arch,
FileName: sum.Path,
@@ -107,6 +116,33 @@ func (p *Provider) CreateArchitectureFiles() error {
},
},
}
+ // var architectureTemplate = []byte(fmt.Sprintf(`
+ //{
+ // "protocols": [
+ // "4.0",
+ // "5.1",
+ // "6.0"
+ // ],
+ // "os": "%s",
+ // "arch": "%s",
+ // "filename": "%s",
+ // "download_url": "%s",
+ // "shasums_url": "%s",
+ // "shasums_signature_url": "%s",
+ // "shasum": "%s",
+ // "signing_keys": {
+ // "gpg_public_keys": [
+ // {
+ // "key_id": "%s",
+ // "ascii_armor": "%s",
+ // "trust_signature": "",
+ // "source": "",
+ // "source_url": ""
+ // }
+ // ]
+ // }
+ //}
+ //`, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub))
log.Printf(" - Arch file: %s", archFileName)
@@ -124,12 +160,8 @@ func WriteArchitectureFile(filePath string, arch Architecture) error {
if err != nil {
return fmt.Errorf("error encoding data: %w", err)
}
- //nolint:gosec // this file is not sensitive, so we can use os.ModePerm
- err = os.WriteFile(
- filePath,
- jsonString,
- os.ModePerm,
- )
+
+ err = os.WriteFile(filePath, jsonString, os.ModePerm)
if err != nil {
return fmt.Errorf("error writing data: %w", err)
}
diff --git a/generator/cmd/publish/gpg.go b/cmd/cmd/publish/gpg.go
similarity index 100%
rename from generator/cmd/publish/gpg.go
rename to cmd/cmd/publish/gpg.go
diff --git a/generator/cmd/publish/provider.go b/cmd/cmd/publish/provider.go
similarity index 88%
rename from generator/cmd/publish/provider.go
rename to cmd/cmd/publish/provider.go
index 88849eb0..92a77b9a 100644
--- a/generator/cmd/publish/provider.go
+++ b/cmd/cmd/publish/provider.go
@@ -143,7 +143,7 @@ func (p *Provider) createVersionsFile() error {
// Build the versions file...
version := Version{
Version: p.Version,
- Protocols: []string{"5.1", "6.1"},
+ Protocols: []string{"5.1"},
Platforms: nil,
}
for _, sum := range shasums {
@@ -161,12 +161,10 @@ func (p *Provider) createVersionsFile() error {
target := fileNameSplit[2]
arch := fileNameSplit[3]
- version.Platforms = append(
- version.Platforms, Platform{
- OS: target,
- Arch: arch,
- },
- )
+ version.Platforms = append(version.Platforms, Platform{
+ OS: target,
+ Arch: arch,
+ })
}
data := Data{}
@@ -208,19 +206,16 @@ func (p *Provider) CreateWellKnown() error {
log.Println("* Creating .well-known directory")
pathString := path.Join(p.RootPath, "release", ".well-known")
- //nolint:gosec // this file is not sensitive, so we can use ModePerm
err := os.MkdirAll(pathString, os.ModePerm)
if err != nil && !errors.Is(err, fs.ErrExist) {
return fmt.Errorf("error creating '%s' dir: %w", pathString, err)
}
log.Println(" - Writing to .well-known/terraform.json file")
-
- //nolint:gosec // this file is not sensitive, so we can use 0644
err = os.WriteFile(
fmt.Sprintf("%s/terraform.json", pathString),
[]byte(`{"providers.v1": "/v1/providers/"}`),
- 0o644,
+ 0644,
)
if err != nil {
return err
@@ -229,10 +224,9 @@ func (p *Provider) CreateWellKnown() error {
return nil
}
-func CreateDir(pathValue string) error {
- log.Printf("* Creating %s directory", pathValue)
- //nolint:gosec // this file is not sensitive, so we can use ModePerm
- err := os.MkdirAll(pathValue, os.ModePerm)
+func CreateDir(path string) error {
+ log.Printf("* Creating %s directory", path)
+ err := os.MkdirAll(path, os.ModePerm)
if errors.Is(err, fs.ErrExist) {
return nil
}
@@ -275,23 +269,13 @@ func CopyFile(src, dst string) (int64, error) {
if err != nil {
return 0, err
}
- defer func(source *os.File) {
- err := source.Close()
- if err != nil {
- slog.Error("error closing source file", slog.Any("err", err))
- }
- }(source)
+ defer source.Close()
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
- defer func(destination *os.File) {
- err := destination.Close()
- if err != nil {
- slog.Error("error closing destination file", slog.Any("err", err))
- }
- }(destination)
+ defer destination.Close()
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
diff --git a/generator/cmd/publish/shasums.go b/cmd/cmd/publish/shasums.go
similarity index 100%
rename from generator/cmd/publish/shasums.go
rename to cmd/cmd/publish/shasums.go
diff --git a/generator/cmd/publish/versions.go b/cmd/cmd/publish/versions.go
similarity index 70%
rename from generator/cmd/publish/versions.go
rename to cmd/cmd/publish/versions.go
index 5f75d45d..4145612a 100644
--- a/generator/cmd/publish/versions.go
+++ b/cmd/cmd/publish/versions.go
@@ -22,25 +22,16 @@ type Platform struct {
}
type Data struct {
- Id string `json:"id,omitempty"`
Versions []Version `json:"versions"`
}
func (d *Data) WriteToFile(filePath string) error {
- // TODO: make it variable
- d.Id = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
-
jsonString, err := json.Marshal(d)
if err != nil {
return fmt.Errorf("error encoding data: %w", err)
}
- //nolint:gosec // this file is not sensitive, so we can use os.ModePerm
- err = os.WriteFile(
- filePath,
- jsonString,
- os.ModePerm,
- )
+ err = os.WriteFile(filePath, jsonString, os.ModePerm)
if err != nil {
return fmt.Errorf("error writing data: %w", err)
}
@@ -91,13 +82,7 @@ func (d *Data) LoadFromUrl(uri string) error {
if err != nil {
return err
}
- defer func(name string) {
- //nolint:gosec // The file path is generated by os.CreateTemp and is not user-controllable
- err := os.Remove(name)
- if err != nil {
- slog.Error("failed to remove temporary file", slog.Any("err", err))
- }
- }(file.Name()) // Clean up
+ defer os.Remove(file.Name()) // Clean up
err = DownloadFile(
u.String(),
@@ -134,30 +119,20 @@ func (v *Version) AddProtocol(p string) error {
// DownloadFile will download a url and store it in local filepath.
// It writes to the destination file as it downloads it, without
// loading the entire file into memory.
-func DownloadFile(urlValue, filepath string) error {
+func DownloadFile(url string, filepath string) error {
// Create the file
- //nolint:gosec // path traversal is not a concern here, as the filepath is generated by us and not user input
out, err := os.Create(filepath)
if err != nil {
return err
}
- defer func(out *os.File) {
- err := out.Close()
- if err != nil {
- slog.Error("failed to close file", slog.Any("err", err))
- }
- }(out)
+ defer out.Close()
// Get the data
-
- //nolint:gosec,bodyclose // this is a controlled URL, not user input
- resp, err := http.Get(urlValue)
+ resp, err := http.Get(url)
if err != nil {
return err
}
- defer func(Body io.ReadCloser) {
- _ = Body.Close()
- }(resp.Body)
+ defer resp.Body.Close()
// Write the body to file
_, err = io.Copy(out, resp.Body)
diff --git a/generator/cmd/publishCmd.go b/cmd/cmd/publishCmd.go
similarity index 84%
rename from generator/cmd/publishCmd.go
rename to cmd/cmd/publishCmd.go
index bdc5368f..22e3efb9 100644
--- a/generator/cmd/publishCmd.go
+++ b/cmd/cmd/publishCmd.go
@@ -10,8 +10,7 @@ import (
"path/filepath"
"github.com/spf13/cobra"
-
- publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/publish"
+ publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/publish"
)
var (
@@ -29,32 +28,20 @@ var publishCmd = &cobra.Command{
Use: "publish",
Short: "Publish terraform provider",
Long: `...`,
- RunE: func(_ *cobra.Command, _ []string) error {
+ RunE: func(_ *cobra.Command, args []string) error {
return publish()
},
}
-func init() { //nolint:gochecknoinits //this is the standard way to set up cobra commands
+func init() { // nolint: gochecknoinits
publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.")
publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.")
publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.")
publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.")
publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.")
publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.")
- publishCmd.Flags().StringVarP(
- &gpgFingerprint,
- "gpgFingerprint",
- "f",
- "",
- "GPG Fingerprint for the Terraform registry.",
- )
- publishCmd.Flags().StringVarP(
- &gpgPubKeyFile,
- "gpgPubKeyFile",
- "k",
- "",
- "GPG PubKey file name for the Terraform registry.",
- )
+ publishCmd.Flags().StringVarP(&gpgFingerprint, "gpgFingerprint", "f", "", "GPG Fingerprint for the Terraform registry.")
+ publishCmd.Flags().StringVarP(&gpgPubKeyFile, "gpgPubKeyFile", "k", "", "GPG PubKey file name for the Terraform registry.")
err := publishCmd.MarkFlagRequired("namespace")
if err != nil {
@@ -117,7 +104,6 @@ func publish() error {
// Create release dir - only the contents of this need to be uploaded to S3
log.Printf("* Creating release directory")
- //nolint:gosec // this directory is not sensitive, so we can use 0750
err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm)
if err != nil && !errors.Is(err, fs.ErrExist) {
return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err)
diff --git a/generator/cmd/rootCmd.go b/cmd/cmd/rootCmd.go
similarity index 100%
rename from generator/cmd/rootCmd.go
rename to cmd/cmd/rootCmd.go
diff --git a/cmd/main.go b/cmd/main.go
new file mode 100644
index 00000000..7704aa1d
--- /dev/null
+++ b/cmd/main.go
@@ -0,0 +1,27 @@
+package main
+
+import (
+ "log"
+ "os"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd"
+)
+
+func main() {
+ rootCmd := cmd.NewRootCmd()
+ //rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
+ //rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution")
+ //rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "name of license for the project")
+
+ rootCmd.SetOut(os.Stdout)
+
+ rootCmd.AddCommand(
+ cmd.NewBuildCmd(),
+ cmd.NewPublishCmd(),
+ )
+
+ err := rootCmd.Execute()
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/docs/data-sources/postgresflexalpha_database.md b/docs/data-sources/postgresflexalpha_database.md
index 95c115e3..834d030c 100644
--- a/docs/data-sources/postgresflexalpha_database.md
+++ b/docs/data-sources/postgresflexalpha_database.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview"
subcategory: ""
description: |-
-
+ Postgres Flex database resource schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_postgresflexalpha_database (Data Source)
-
+Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -25,14 +25,16 @@ data "stackitprivatepreview_postgresflexalpha_database" "example" {
### Required
-- `database_id` (Number) The ID of the database.
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
+- `instance_id` (String) ID of the Postgres Flex instance.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
+
+### Optional
+
+- `database_id` (Number) Database ID.
+- `name` (String) Database name.
+- `region` (String) The resource region. If not defined, the provider region is used.
### Read-Only
-- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
-- `name` (String) The name of the database.
-- `owner` (String) The owner of the database.
-- `tf_original_api_id` (Number) The id of the database.
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`database_id`".
+- `owner` (String) Username of the database owner.
diff --git a/docs/data-sources/postgresflexalpha_flavor.md b/docs/data-sources/postgresflexalpha_flavor.md
index 24c79829..4d28ffc3 100644
--- a/docs/data-sources/postgresflexalpha_flavor.md
+++ b/docs/data-sources/postgresflexalpha_flavor.md
@@ -10,18 +10,7 @@ description: |-
-## Example Usage
-```terraform
-data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 4
- ram = 16
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
-}
-```
## Schema
diff --git a/docs/data-sources/postgresflexalpha_flavors.md b/docs/data-sources/postgresflexalpha_flavors.md
index 06645bb4..f90ae257 100644
--- a/docs/data-sources/postgresflexalpha_flavors.md
+++ b/docs/data-sources/postgresflexalpha_flavors.md
@@ -38,12 +38,12 @@ Read-Only:
- `cpu` (Number) The cpu count of the instance.
- `description` (String) The flavor description.
+- `id` (String) The id of the instance flavor.
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
- `memory` (Number) The memory of the instance in Gibibyte.
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
- `node_type` (String) defines the nodeType it can be either single or replica
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes))
-- `tf_original_api_id` (String) The id of the instance flavor.
### Nested Schema for `flavors.storage_classes`
diff --git a/docs/data-sources/postgresflexalpha_instance.md b/docs/data-sources/postgresflexalpha_instance.md
index d21a5f10..b254eb7d 100644
--- a/docs/data-sources/postgresflexalpha_instance.md
+++ b/docs/data-sources/postgresflexalpha_instance.md
@@ -30,13 +30,13 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
### Read-Only
-- `acl` (List of String) List of IPV4 cidr.
-- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
-- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
+- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
-⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
- `flavor_id` (String) The id of the instance flavor.
+- `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@@ -44,7 +44,6 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
- `status` (String) The current status of the instance.
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
-- `tf_original_api_id` (String) The ID of the instance.
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
@@ -52,18 +51,10 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
Read-Only:
-- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
-
-
-### Nested Schema for `connection_info.write`
-
-Read-Only:
-
- `host` (String) The host of the instance.
- `port` (Number) The port of the instance.
-
### Nested Schema for `encryption`
diff --git a/docs/data-sources/postgresflexalpha_user.md b/docs/data-sources/postgresflexalpha_user.md
index c3553c7b..1cda4f62 100644
--- a/docs/data-sources/postgresflexalpha_user.md
+++ b/docs/data-sources/postgresflexalpha_user.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview"
subcategory: ""
description: |-
-
+ Postgres Flex user data source schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_postgresflexalpha_user (Data Source)
-
+Postgres Flex user data source schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -25,18 +25,20 @@ data "stackitprivatepreview_postgresflexalpha_user" "example" {
### Required
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-- `user_id` (Number) The ID of the user.
+- `instance_id` (String) ID of the PostgresFlex instance.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
+- `user_id` (String) User ID.
### Optional
-- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
+- `region` (String) The resource region. If not defined, the provider region is used.
### Read-Only
-- `name` (String) The name of the user.
-- `roles` (List of String) A list of user roles.
+- `connection_string` (String) The connection string for the user to the instance.
+- `host` (String) The host address for the user to connect to the instance.
+- `id` (String) Terraform's internal data source ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
+- `port` (Number) The port number for the user to connect to the instance.
+- `roles` (Set of String) The roles assigned to the user.
- `status` (String) The current status of the user.
-- `tf_original_api_id` (Number) The ID of the user.
+- `username` (String) The name of the user.
diff --git a/docs/data-sources/sqlserverflexalpha_database.md b/docs/data-sources/sqlserverflexalpha_database.md
index df66ffb7..4aab99cc 100644
--- a/docs/data-sources/sqlserverflexalpha_database.md
+++ b/docs/data-sources/sqlserverflexalpha_database.md
@@ -26,7 +26,6 @@ description: |-
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
- `compatibility_level` (Number) CompatibilityLevel of the Database.
-- `id` (String) The terraform internal identifier.
+- `id` (Number) The id of the database.
- `name` (String) The name of the database.
- `owner` (String) The owner of the database.
-- `tf_original_api_id` (Number) The id of the database.
diff --git a/docs/data-sources/sqlserverflexalpha_flavor.md b/docs/data-sources/sqlserverflexalpha_flavor.md
new file mode 100644
index 00000000..426a0605
--- /dev/null
+++ b/docs/data-sources/sqlserverflexalpha_flavor.md
@@ -0,0 +1,43 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `cpu` (Number) The cpu count of the instance.
+- `node_type` (String) defines the nodeType it can be either single or replica
+- `project_id` (String) The STACKIT project ID.
+- `ram` (Number) The memory of the instance in Gibibyte.
+- `region` (String) The region which should be addressed.
+- `storage_class` (String) The storage class of the instance.
+
+### Read-Only
+
+- `description` (String) The flavor description.
+- `flavor_id` (String) The flavor id of the instance flavor.
+- `id` (String) The terraform id of the instance flavor.
+- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
+- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
+- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
+
+
+### Nested Schema for `storage_classes`
+
+Read-Only:
+
+- `class` (String)
+- `max_io_per_sec` (Number)
+- `max_through_in_mb` (Number)
diff --git a/docs/data-sources/sqlserverflexalpha_instance.md b/docs/data-sources/sqlserverflexalpha_instance.md
index b05d7b8e..9627892a 100644
--- a/docs/data-sources/sqlserverflexalpha_instance.md
+++ b/docs/data-sources/sqlserverflexalpha_instance.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
subcategory: ""
description: |-
-
+ SQLServer Flex ALPHA instance resource schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
-
+SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -24,48 +24,61 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
### Required
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
+- `instance_id` (String) ID of the SQLServer Flex instance.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
+
+### Optional
+
+- `region` (String) The resource region. If not defined, the provider region is used.
### Read-Only
-- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
-- `edition` (String) Edition of the MSSQL server instance
-- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
-- `flavor_id` (String) The id of the instance flavor.
-- `is_deletable` (Boolean) Whether the instance can be deleted or not.
-- `name` (String) The name of the instance.
-- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
-- `replicas` (Number) How many replicas the instance should have.
-- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
+- `backup_schedule` (String) The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")
+- `edition` (String)
+- `encryption` (Attributes) The encryption block. (see [below for nested schema](#nestedatt--encryption))
+- `flavor` (Attributes) (see [below for nested schema](#nestedatt--flavor))
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`".
+- `is_deletable` (Boolean)
+- `name` (String) Instance name.
+- `network` (Attributes) The network block. (see [below for nested schema](#nestedatt--network))
+- `replicas` (Number)
+- `retention_days` (Number)
- `status` (String)
-- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
-- `tf_original_api_id` (String) The ID of the instance.
-- `version` (String) The sqlserver version used for the instance.
+- `storage` (Attributes) (see [below for nested schema](#nestedatt--storage))
+- `version` (String)
### Nested Schema for `encryption`
Read-Only:
-- `kek_key_id` (String) The key identifier
-- `kek_key_ring_id` (String) The keyring identifier
-- `kek_key_version` (String) The key version
+- `key_id` (String) STACKIT KMS - Key ID of the encryption key to use.
+- `key_version` (String) STACKIT KMS - Key version to use in the encryption key.
+- `keyring_id` (String) STACKIT KMS - KeyRing ID of the encryption key to use.
- `service_account` (String)
+
+### Nested Schema for `flavor`
+
+Read-Only:
+
+- `cpu` (Number)
+- `description` (String)
+- `id` (String)
+- `node_type` (String)
+- `ram` (Number)
+
+
### Nested Schema for `network`
Read-Only:
-- `access_scope` (String) The network access scope of the instance
-
-⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
-- `acl` (List of String) List of IPV4 cidr.
-- `instance_address` (String)
-- `router_address` (String)
+- `access_scope` (String) The access scope of the instance. (e.g. SNA)
+- `acl` (List of String) The Access Control List (ACL) for the SQLServer Flex instance.
+- `instance_address` (String) The returned instance IP address of the SQLServer Flex instance.
+- `router_address` (String) The returned router IP address of the SQLServer Flex instance.
@@ -73,5 +86,5 @@ Read-Only:
Read-Only:
-- `class` (String) The storage class for the storage.
-- `size` (Number) The storage size in Gigabytes.
+- `class` (String)
+- `size` (Number)
diff --git a/docs/data-sources/sqlserverflexalpha_user.md b/docs/data-sources/sqlserverflexalpha_user.md
index 63526135..b0b15341 100644
--- a/docs/data-sources/sqlserverflexalpha_user.md
+++ b/docs/data-sources/sqlserverflexalpha_user.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
subcategory: ""
description: |-
-
+ SQLServer Flex user data source schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
-
+SQLServer Flex user data source schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -25,38 +25,20 @@ data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
### Required
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
+- `instance_id` (String) ID of the SQLServer Flex instance.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
+- `user_id` (Number) User ID.
### Optional
-- `page` (Number) Number of the page of items list to be returned.
-- `size` (Number) Number of items to be returned on each page.
-- `sort` (String) Sorting of the users to be returned on each page.
+- `region` (String) The resource region. If not defined, the provider region is used.
### Read-Only
-- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
-- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
-
-
-### Nested Schema for `pagination`
-
-Read-Only:
-
-- `page` (Number)
-- `size` (Number)
-- `sort` (String)
-- `total_pages` (Number)
-- `total_rows` (Number)
-
-
-
-### Nested Schema for `users`
-
-Read-Only:
-
-- `status` (String) The current status of the user.
-- `tf_original_api_id` (Number) The ID of the user.
-- `username` (String) The name of the user.
+- `default_database` (String)
+- `host` (String)
+- `id` (String) Terraform's internal data source ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
+- `port` (Number)
+- `roles` (Set of String) Database access levels for the user.
+- `status` (String)
+- `username` (String) Username of the SQLServer Flex instance.
diff --git a/docs/data-sources/sqlserverflexalpha_version.md b/docs/data-sources/sqlserverflexalpha_version.md
new file mode 100644
index 00000000..c9c61732
--- /dev/null
+++ b/docs/data-sources/sqlserverflexalpha_version.md
@@ -0,0 +1,35 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "stackitprivatepreview_sqlserverflexalpha_version Data Source - stackitprivatepreview"
+subcategory: ""
+description: |-
+
+---
+
+# stackitprivatepreview_sqlserverflexalpha_version (Data Source)
+
+
+
+
+
+
+## Schema
+
+### Required
+
+- `project_id` (String) The STACKIT project ID.
+- `region` (String) The region which should be addressed
+
+### Read-Only
+
+- `versions` (Attributes List) A list containing available sqlserver versions. (see [below for nested schema](#nestedatt--versions))
+
+
+### Nested Schema for `versions`
+
+Read-Only:
+
+- `beta` (Boolean) Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.
+- `deprecated` (String) Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.
+- `recommend` (Boolean) Flag if the version is recommended by the STACKIT Team.
+- `version` (String) The sqlserver version used for the instance.
diff --git a/docs/data-sources/sqlserverflexbeta_database.md b/docs/data-sources/sqlserverflexbeta_database.md
deleted file mode 100644
index 9322049f..00000000
--- a/docs/data-sources/sqlserverflexbeta_database.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
-
-
-
-## Example Usage
-
-```terraform
-data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- database_name = "dbname"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `database_name` (String) The name of the database.
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-
-### Read-Only
-
-- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
-- `compatibility_level` (Number) CompatibilityLevel of the Database.
-- `id` (String) The terraform internal identifier.
-- `name` (String) The name of the database.
-- `owner` (String) The owner of the database.
-- `tf_original_api_id` (Number) The id of the database.
diff --git a/docs/data-sources/sqlserverflexbeta_instance.md b/docs/data-sources/sqlserverflexbeta_instance.md
deleted file mode 100644
index 431f95f1..00000000
--- a/docs/data-sources/sqlserverflexbeta_instance.md
+++ /dev/null
@@ -1,77 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
-
-
-
-## Example Usage
-
-```terraform
-data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-
-### Read-Only
-
-- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
-- `edition` (String) Edition of the MSSQL server instance
-- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
-- `flavor_id` (String) The id of the instance flavor.
-- `is_deletable` (Boolean) Whether the instance can be deleted or not.
-- `name` (String) The name of the instance.
-- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
-- `replicas` (Number) How many replicas the instance should have.
-- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
-- `status` (String)
-- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
-- `tf_original_api_id` (String) The ID of the instance.
-- `version` (String) The sqlserver version used for the instance.
-
-
-### Nested Schema for `encryption`
-
-Read-Only:
-
-- `kek_key_id` (String) The key identifier
-- `kek_key_ring_id` (String) The keyring identifier
-- `kek_key_version` (String) The key version
-- `service_account` (String)
-
-
-
-### Nested Schema for `network`
-
-Read-Only:
-
-- `access_scope` (String) The network access scope of the instance
-
-⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
-- `acl` (List of String) List of IPV4 cidr.
-- `instance_address` (String)
-- `router_address` (String)
-
-
-
-### Nested Schema for `storage`
-
-Read-Only:
-
-- `class` (String) The storage class for the storage.
-- `size` (Number) The storage size in Gigabytes.
diff --git a/docs/data-sources/sqlserverflexbeta_user.md b/docs/data-sources/sqlserverflexbeta_user.md
deleted file mode 100644
index f87f454e..00000000
--- a/docs/data-sources/sqlserverflexbeta_user.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
-
-
-
-
-
-
-## Schema
-
-### Required
-
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-
-### Optional
-
-- `page` (Number) Number of the page of items list to be returned.
-- `size` (Number) Number of items to be returned on each page.
-- `sort` (String) Sorting of the users to be returned on each page.
-
-### Read-Only
-
-- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
-- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
-
-
-### Nested Schema for `pagination`
-
-Read-Only:
-
-- `page` (Number)
-- `size` (Number)
-- `sort` (String)
-- `total_pages` (Number)
-- `total_rows` (Number)
-
-
-
-### Nested Schema for `users`
-
-Read-Only:
-
-- `status` (String) The current status of the user.
-- `tf_original_api_id` (Number) The ID of the user.
-- `username` (String) The name of the user.
diff --git a/docs/index.md b/docs/index.md
index 84bc25b3..4f1e52cd 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -16,13 +16,14 @@ provider "stackitprivatepreview" {
default_region = "eu01"
}
-provider "stackitprivatepreview" {
- default_region = "eu01"
- service_account_key_path = "service_account.json"
-}
-
# Authentication
+# Token flow (scheduled for deprecation and will be removed on December 17, 2025)
+provider "stackitprivatepreview" {
+ default_region = "eu01"
+ service_account_token = var.service_account_token
+}
+
# Key flow
provider "stackitprivatepreview" {
default_region = "eu01"
diff --git a/docs/resources/postgresflexalpha_database.md b/docs/resources/postgresflexalpha_database.md
index 6c94fd62..8fdceeb5 100644
--- a/docs/resources/postgresflexalpha_database.md
+++ b/docs/resources/postgresflexalpha_database.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview"
subcategory: ""
description: |-
-
+ Postgres Flex database resource schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_postgresflexalpha_database (Resource)
-
+Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -25,16 +25,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
}
-
-import {
- to = stackitprivatepreview_postgresflexalpha_database.import-example
- identity = {
- project_id = "project_id"
- region = "region"
- instance_id = "instance_id"
- database_id = "database_id"
- }
-}
```
@@ -42,16 +32,16 @@ import {
### Required
-- `name` (String) The name of the database.
+- `instance_id` (String) ID of the Postgres Flex instance.
+- `name` (String) Database name.
+- `owner` (String) Username of the database owner.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
### Optional
-- `database_id` (Number) The ID of the database.
-- `instance_id` (String) The ID of the instance.
-- `owner` (String) The owner of the database.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
+- `region` (String) The resource region. If not defined, the provider region is used.
### Read-Only
-- `id` (Number) The id of the database.
+- `database_id` (Number) Database ID.
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`database_id`".
diff --git a/docs/resources/postgresflexalpha_instance.md b/docs/resources/postgresflexalpha_instance.md
index f6f10bcc..3dc7ef51 100644
--- a/docs/resources/postgresflexalpha_instance.md
+++ b/docs/resources/postgresflexalpha_instance.md
@@ -13,29 +13,21 @@ description: |-
## Example Usage
```terraform
-resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
+resource "stackitprivatepreview_postgresflexalpha_instance" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
name = "example-instance"
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- backup_schedule = "0 0 * * *"
- retention_days = 30
- flavor_id = "flavor.id"
- replicas = 1
+ backup_schedule = "00 00 * * *"
+ flavor = {
+ cpu = 2
+ ram = 4
+ }
+ replicas = 3
storage = {
- performance_class = "premium-perf2-stackit"
- size = 10
+ class = "class"
+ size = 5
}
- encryption = {
- kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_version = 1
- service_account = "service@account.email"
- }
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "PUBLIC"
- }
- version = 17
+ version = 14
}
# Only use the import statement, if you want to import an existing postgresflex instance
@@ -43,15 +35,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_instance.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id}"
}
-
-import {
- to = stackitprivatepreview_postgresflexalpha_instance.import-example
- identity = {
- project_id = var.project_id
- region = var.region
- instance_id = var.postgres_instance_id
- }
-}
```
@@ -59,7 +42,7 @@ import {
### Required
-- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
+- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
- `flavor_id` (String) The id of the instance flavor.
- `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@@ -72,15 +55,14 @@ import {
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
-⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
+⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
- `instance_id` (String) The ID of the instance.
- `project_id` (String) The STACKIT project ID.
- `region` (String) The region which should be addressed
### Read-Only
-- `acl` (List of String) List of IPV4 cidr.
-- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
+- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
- `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `status` (String) The current status of the instance.
@@ -95,9 +77,6 @@ Required:
Optional:
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
-
-Read-Only:
-
- `instance_address` (String)
- `router_address` (String)
@@ -127,12 +106,5 @@ Required:
Read-Only:
-- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
-
-
-### Nested Schema for `connection_info.write`
-
-Read-Only:
-
- `host` (String) The host of the instance.
- `port` (Number) The port of the instance.
diff --git a/docs/resources/postgresflexalpha_user.md b/docs/resources/postgresflexalpha_user.md
index b83de15d..d3b12f9d 100644
--- a/docs/resources/postgresflexalpha_user.md
+++ b/docs/resources/postgresflexalpha_user.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview"
subcategory: ""
description: |-
-
+ Postgres Flex user resource schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_postgresflexalpha_user (Resource)
-
+Postgres Flex user resource schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -16,7 +16,7 @@ description: |-
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "username"
+ username = "username"
roles = ["role"]
}
@@ -25,16 +25,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
}
-
-import {
- to = stackitprivatepreview_postgresflexalpha_user.import-example
- identity = {
- project_id = "project.id"
- region = "region"
- instance_id = "instance.id"
- user_id = "user.id"
- }
-}
```
@@ -42,18 +32,21 @@ import {
### Required
-- `name` (String) The name of the user.
+- `instance_id` (String) ID of the PostgresFlex instance.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
+- `roles` (Set of String) Database access levels for the user. Possible values are: `login`, `createdb`, `createrole`.
+- `username` (String) The name of the user.
### Optional
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-- `roles` (List of String) A list containing the user roles for the instance.
-- `user_id` (Number) The ID of the user.
+- `region` (String) The resource region. If not defined, the provider region is used.
### Read-Only
-- `id` (Number) The ID of the user.
-- `password` (String) The password for the user.
+- `connection_string` (String) The connection string for the user to the instance.
+- `host` (String) The host of the Postgres Flex instance.
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
+- `password` (String, Sensitive) The password for the user. This is only set upon creation.
+- `port` (Number) The port of the Postgres Flex instance.
- `status` (String) The current status of the user.
+- `user_id` (Number) User ID.
diff --git a/docs/resources/sqlserverflexalpha_database.md b/docs/resources/sqlserverflexalpha_database.md
index 7d8f050b..fd6ba0fd 100644
--- a/docs/resources/sqlserverflexalpha_database.md
+++ b/docs/resources/sqlserverflexalpha_database.md
@@ -10,34 +10,7 @@ description: |-
-## Example Usage
-```terraform
-resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- collation = ""
- compatibility = "160"
- name = ""
- owner = ""
-}
-
-# Only use the import statement, if you want to import a existing sqlserverflex database
-import {
- to = stackitprivatepreview_sqlserverflexalpha_database.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
-}
-
-import {
- to = stackitprivatepreview_sqlserverflexalpha_database.import-example
- identity = {
- project_id = "project.id"
- region = "region"
- instance_id = "instance.id"
- database_id = "database.id"
- }
-}
-```
## Schema
diff --git a/docs/resources/sqlserverflexalpha_instance.md b/docs/resources/sqlserverflexalpha_instance.md
index 95e33673..d5926387 100644
--- a/docs/resources/sqlserverflexalpha_instance.md
+++ b/docs/resources/sqlserverflexalpha_instance.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
subcategory: ""
description: |-
-
+ SQLServer Flex ALPHA instance resource schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
-
+SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -41,55 +41,41 @@ import {
### Required
-- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
-- `flavor_id` (String) The id of the instance flavor.
-- `name` (String) The name of the instance.
-- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
-- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
-- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
-- `version` (String) The sqlserver version used for the instance.
+- `flavor_id` (String)
+- `name` (String) Instance name.
+- `network` (Attributes) The network block. (see [below for nested schema](#nestedatt--network))
+- `project_id` (String) STACKIT project ID to which the instance is associated.
### Optional
-- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
+- `backup_schedule` (String) The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")
+- `encryption` (Attributes) The encryption block. (see [below for nested schema](#nestedatt--encryption))
+- `is_deletable` (Boolean)
+- `region` (String) The resource region. If not defined, the provider region is used.
+- `retention_days` (Number)
+- `status` (String)
+- `storage` (Attributes) (see [below for nested schema](#nestedatt--storage))
+- `version` (String)
### Read-Only
-- `edition` (String) Edition of the MSSQL server instance
-- `id` (String) The ID of the instance.
-- `is_deletable` (Boolean) Whether the instance can be deleted or not.
-- `replicas` (Number) How many replicas the instance should have.
-- `status` (String)
+- `edition` (String)
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`".
+- `instance_id` (String) ID of the SQLServer Flex instance.
+- `replicas` (Number)
### Nested Schema for `network`
Required:
-- `acl` (List of String) List of IPV4 cidr.
-
-Optional:
-
-- `access_scope` (String) The network access scope of the instance
-
-⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
+- `access_scope` (String) The access scope of the instance. (SNA | PUBLIC)
+- `acl` (List of String) The Access Control List (ACL) for the SQLServer Flex instance.
Read-Only:
-- `instance_address` (String)
-- `router_address` (String)
-
-
-
-### Nested Schema for `storage`
-
-Required:
-
-- `class` (String) The storage class for the storage.
-- `size` (Number) The storage size in Gigabytes.
+- `instance_address` (String) The returned instance IP address of the SQLServer Flex instance.
+- `router_address` (String) The returned router IP address of the SQLServer Flex instance.
@@ -97,7 +83,16 @@ Required:
Required:
-- `kek_key_id` (String) The key identifier
-- `kek_key_ring_id` (String) The keyring identifier
-- `kek_key_version` (String) The key version
+- `key_id` (String) STACKIT KMS - Key ID of the encryption key to use.
+- `key_version` (String) STACKIT KMS - Key version to use in the encryption key.
+- `keyring_id` (String) STACKIT KMS - KeyRing ID of the encryption key to use.
- `service_account` (String)
+
+
+
+### Nested Schema for `storage`
+
+Optional:
+
+- `class` (String)
+- `size` (Number)
diff --git a/docs/resources/sqlserverflexalpha_user.md b/docs/resources/sqlserverflexalpha_user.md
index 85d5350e..3f37556c 100644
--- a/docs/resources/sqlserverflexalpha_user.md
+++ b/docs/resources/sqlserverflexalpha_user.md
@@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
subcategory: ""
description: |-
-
+ SQLServer Flex user resource schema. Must have a region specified in the provider configuration.
---
# stackitprivatepreview_sqlserverflexalpha_user (Resource)
-
+SQLServer Flex user resource schema. Must have a `region` specified in the provider configuration.
## Example Usage
@@ -32,22 +32,21 @@ import {
### Required
-- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
-- `username` (String) The name of the user.
+- `instance_id` (String) ID of the SQLServer Flex instance.
+- `project_id` (String) STACKIT project ID to which the instance is associated.
+- `roles` (Set of String) Database access levels for the user. The values for the default roles are: `##STACKIT_DatabaseManager##`, `##STACKIT_LoginManager##`, `##STACKIT_ProcessManager##`, `##STACKIT_ServerManager##`, `##STACKIT_SQLAgentManager##`, `##STACKIT_SQLAgentUser##`
+- `username` (String) Username of the SQLServer Flex instance.
### Optional
-- `default_database` (String) The default database for a user of the instance.
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-- `user_id` (Number) The ID of the user.
+- `region` (String)
### Read-Only
-- `host` (String) The host of the instance in which the user belongs to.
-- `id` (Number) The ID of the user.
-- `password` (String) The password for the user.
-- `port` (Number) The port of the instance in which the user belongs to.
-- `status` (String) The current status of the user.
-- `uri` (String) The connection string for the user to the instance.
+- `default_database` (String)
+- `host` (String)
+- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
+- `password` (String, Sensitive) Password of the user account.
+- `port` (Number)
+- `status` (String)
+- `user_id` (Number) User ID.
diff --git a/docs/resources/sqlserverflexbeta_database.md b/docs/resources/sqlserverflexbeta_database.md
deleted file mode 100644
index fabaaccb..00000000
--- a/docs/resources/sqlserverflexbeta_database.md
+++ /dev/null
@@ -1,51 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexbeta_database (Resource)
-
-
-
-## Example Usage
-
-```terraform
-resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- username = "username"
- roles = ["role"]
-}
-
-# Only use the import statement, if you want to import an existing sqlserverflex user
-import {
- to = stackitprivatepreview_sqlserverflexalpha_user.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `name` (String) The name of the database.
-- `owner` (String) The owner of the database.
-
-### Optional
-
-- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
-- `compatibility` (Number) CompatibilityLevel of the Database.
-- `database_name` (String) The name of the database.
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-
-### Read-Only
-
-- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
-- `compatibility_level` (Number) CompatibilityLevel of the Database.
-- `id` (Number) The id of the database.
diff --git a/docs/resources/sqlserverflexbeta_instance.md b/docs/resources/sqlserverflexbeta_instance.md
deleted file mode 100644
index 20f5a9bc..00000000
--- a/docs/resources/sqlserverflexbeta_instance.md
+++ /dev/null
@@ -1,158 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
-
-
-
-## Example Usage
-
-```terraform
-# without encryption and SNA
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "example-instance"
- backup_schedule = "0 3 * * *"
- retention_days = 31
- flavor_id = "flavor_id"
- storage = {
- class = "premium-perf2-stackit"
- size = 50
- }
- version = 2022
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "SNA"
- }
-}
-
-# without encryption and PUBLIC
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "example-instance"
- backup_schedule = "0 3 * * *"
- retention_days = 31
- flavor_id = "flavor_id"
- storage = {
- class = "premium-perf2-stackit"
- size = 50
- }
- version = 2022
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "PUBLIC"
- }
-}
-
-# with encryption and SNA
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "example-instance"
- backup_schedule = "0 3 * * *"
- retention_days = 31
- flavor_id = "flavor_id"
- storage = {
- class = "premium-perf2-stackit"
- size = 50
- }
- version = 2022
- encryption = {
- kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_version = 1
- service_account = "service_account@email"
- }
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "SNA"
- }
-}
-
-
-# Only use the import statement, if you want to import an existing sqlserverflex instance
-import {
- to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id}"
-}
-
-# import with identity
-import {
- to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
- identity = {
- project_id = var.project_id
- region = var.region
- instance_id = var.sql_instance_id
- }
-}
-```
-
-
-## Schema
-
-### Required
-
-- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
-- `flavor_id` (String) The id of the instance flavor.
-- `name` (String) The name of the instance.
-- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
-- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
-- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
-- `version` (String) The sqlserver version used for the instance.
-
-### Optional
-
-- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-
-### Read-Only
-
-- `edition` (String) Edition of the MSSQL server instance
-- `id` (String) The ID of the instance.
-- `is_deletable` (Boolean) Whether the instance can be deleted or not.
-- `replicas` (Number) How many replicas the instance should have.
-- `status` (String)
-
-
-### Nested Schema for `network`
-
-Required:
-
-- `acl` (List of String) List of IPV4 cidr.
-
-Optional:
-
-- `access_scope` (String) The network access scope of the instance
-
-⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
-
-Read-Only:
-
-- `instance_address` (String)
-- `router_address` (String)
-
-
-
-### Nested Schema for `storage`
-
-Required:
-
-- `class` (String) The storage class for the storage.
-- `size` (Number) The storage size in Gigabytes.
-
-
-
-### Nested Schema for `encryption`
-
-Required:
-
-- `kek_key_id` (String) The key identifier
-- `kek_key_ring_id` (String) The keyring identifier
-- `kek_key_version` (String) The key version
-- `service_account` (String)
diff --git a/docs/resources/sqlserverflexbeta_user.md b/docs/resources/sqlserverflexbeta_user.md
deleted file mode 100644
index 81d6da28..00000000
--- a/docs/resources/sqlserverflexbeta_user.md
+++ /dev/null
@@ -1,53 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
-subcategory: ""
-description: |-
-
----
-
-# stackitprivatepreview_sqlserverflexbeta_user (Resource)
-
-
-
-## Example Usage
-
-```terraform
-resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- username = "username"
- roles = ["role"]
-}
-
-# Only use the import statement, if you want to import an existing sqlserverflex user
-import {
- to = stackitprivatepreview_sqlserverflexalpha_user.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
-- `username` (String) The name of the user.
-
-### Optional
-
-- `default_database` (String) The default database for a user of the instance.
-- `instance_id` (String) The ID of the instance.
-- `project_id` (String) The STACKIT project ID.
-- `region` (String) The region which should be addressed
-- `user_id` (Number) The ID of the user.
-
-### Read-Only
-
-- `host` (String) The host of the instance in which the user belongs to.
-- `id` (Number) The ID of the user.
-- `password` (String) The password for the user.
-- `port` (Number) The port of the instance in which the user belongs to.
-- `status` (String) The current status of the user.
-- `uri` (String) The connection string for the user to the instance.
diff --git a/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf
deleted file mode 100644
index 67017935..00000000
--- a/examples/data-sources/stackitprivatepreview_postgresflexalpha_flavor/data-source.tf
+++ /dev/null
@@ -1,8 +0,0 @@
-data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 4
- ram = 16
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
-}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf
deleted file mode 100644
index 25d94537..00000000
--- a/examples/data-sources/stackitprivatepreview_sqlserverflexalpha_flavor/data-source.tf
+++ /dev/null
@@ -1,8 +0,0 @@
-data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 4
- ram = 16
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
-}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf
deleted file mode 100644
index 894fcd33..00000000
--- a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_database/data-source.tf
+++ /dev/null
@@ -1,5 +0,0 @@
-data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- database_name = "dbname"
-}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf
deleted file mode 100644
index f40b9680..00000000
--- a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_flavor/data-source.tf
+++ /dev/null
@@ -1,8 +0,0 @@
-data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 4
- ram = 16
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
-}
diff --git a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf b/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf
deleted file mode 100644
index b8c8fc2b..00000000
--- a/examples/data-sources/stackitprivatepreview_sqlserverflexbeta_instance/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
-}
diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf
index 4db0aed3..1795874c 100644
--- a/examples/provider/provider.tf
+++ b/examples/provider/provider.tf
@@ -2,13 +2,14 @@ provider "stackitprivatepreview" {
default_region = "eu01"
}
-provider "stackitprivatepreview" {
- default_region = "eu01"
- service_account_key_path = "service_account.json"
-}
-
# Authentication
+# Token flow (scheduled for deprecation and will be removed on December 17, 2025)
+provider "stackitprivatepreview" {
+ default_region = "eu01"
+ service_account_token = var.service_account_token
+}
+
# Key flow
provider "stackitprivatepreview" {
default_region = "eu01"
@@ -22,3 +23,4 @@ provider "stackitprivatepreview" {
service_account_key_path = var.service_account_key_path
private_key_path = var.private_key_path
}
+
diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf
index ad0c051e..a013b9c6 100644
--- a/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf
+++ b/examples/resources/stackitprivatepreview_postgresflexalpha_database/resource.tf
@@ -9,14 +9,4 @@ resource "stackitprivatepreview_postgresflexalpha_database" "example" {
import {
to = stackitprivatepreview_postgresflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
-}
-
-import {
- to = stackitprivatepreview_postgresflexalpha_database.import-example
- identity = {
- project_id = "project_id"
- region = "region"
- instance_id = "instance_id"
- database_id = "database_id"
- }
-}
+}
\ No newline at end of file
diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf
index b503f0ce..99faf2e7 100644
--- a/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf
+++ b/examples/resources/stackitprivatepreview_postgresflexalpha_instance/resource.tf
@@ -1,39 +1,22 @@
-resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
+resource "stackitprivatepreview_postgresflexalpha_instance" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
name = "example-instance"
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- backup_schedule = "0 0 * * *"
- retention_days = 30
- flavor_id = "flavor.id"
- replicas = 1
+ backup_schedule = "00 00 * * *"
+ flavor = {
+ cpu = 2
+ ram = 4
+ }
+ replicas = 3
storage = {
- performance_class = "premium-perf2-stackit"
- size = 10
+ class = "class"
+ size = 5
}
- encryption = {
- kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_version = 1
- service_account = "service@account.email"
- }
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "PUBLIC"
- }
- version = 17
+ version = 14
}
# Only use the import statement, if you want to import an existing postgresflex instance
import {
to = stackitprivatepreview_postgresflexalpha_instance.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id}"
-}
-
-import {
- to = stackitprivatepreview_postgresflexalpha_instance.import-example
- identity = {
- project_id = var.project_id
- region = var.region
- instance_id = var.postgres_instance_id
- }
-}
+}
\ No newline at end of file
diff --git a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf
index 695741c4..9ec5c419 100644
--- a/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf
+++ b/examples/resources/stackitprivatepreview_postgresflexalpha_user/resource.tf
@@ -1,7 +1,7 @@
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "username"
+ username = "username"
roles = ["role"]
}
@@ -9,14 +9,4 @@ resource "stackitprivatepreview_postgresflexalpha_user" "example" {
import {
to = stackitprivatepreview_postgresflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
-}
-
-import {
- to = stackitprivatepreview_postgresflexalpha_user.import-example
- identity = {
- project_id = "project.id"
- region = "region"
- instance_id = "instance.id"
- user_id = "user.id"
- }
-}
+}
\ No newline at end of file
diff --git a/examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf
deleted file mode 100644
index b85cc22b..00000000
--- a/examples/resources/stackitprivatepreview_sqlserverflexalpha_database/resource.tf
+++ /dev/null
@@ -1,24 +0,0 @@
-resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- collation = ""
- compatibility = "160"
- name = ""
- owner = ""
-}
-
-# Only use the import statement, if you want to import a existing sqlserverflex database
-import {
- to = stackitprivatepreview_sqlserverflexalpha_database.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
-}
-
-import {
- to = stackitprivatepreview_sqlserverflexalpha_database.import-example
- identity = {
- project_id = "project.id"
- region = "region"
- instance_id = "instance.id"
- database_id = "database.id"
- }
-}
\ No newline at end of file
diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf
deleted file mode 100644
index 83c52561..00000000
--- a/examples/resources/stackitprivatepreview_sqlserverflexbeta_database/resource.tf
+++ /dev/null
@@ -1,12 +0,0 @@
-resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- username = "username"
- roles = ["role"]
-}
-
-# Only use the import statement, if you want to import an existing sqlserverflex user
-import {
- to = stackitprivatepreview_sqlserverflexalpha_user.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
-}
\ No newline at end of file
diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf
deleted file mode 100644
index 06e88f64..00000000
--- a/examples/resources/stackitprivatepreview_sqlserverflexbeta_instance/resource.tf
+++ /dev/null
@@ -1,76 +0,0 @@
-# without encryption and SNA
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "example-instance"
- backup_schedule = "0 3 * * *"
- retention_days = 31
- flavor_id = "flavor_id"
- storage = {
- class = "premium-perf2-stackit"
- size = 50
- }
- version = 2022
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "SNA"
- }
-}
-
-# without encryption and PUBLIC
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "example-instance"
- backup_schedule = "0 3 * * *"
- retention_days = 31
- flavor_id = "flavor_id"
- storage = {
- class = "premium-perf2-stackit"
- size = 50
- }
- version = 2022
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "PUBLIC"
- }
-}
-
-# with encryption and SNA
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- name = "example-instance"
- backup_schedule = "0 3 * * *"
- retention_days = 31
- flavor_id = "flavor_id"
- storage = {
- class = "premium-perf2-stackit"
- size = 50
- }
- version = 2022
- encryption = {
- kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- kek_key_version = 1
- service_account = "service_account@email"
- }
- network = {
- acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
- access_scope = "SNA"
- }
-}
-
-
-# Only use the import statement, if you want to import an existing sqlserverflex instance
-import {
- to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id}"
-}
-
-# import with identity
-import {
- to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
- identity = {
- project_id = var.project_id
- region = var.region
- instance_id = var.sql_instance_id
- }
-}
diff --git a/examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf b/examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf
deleted file mode 100644
index 83c52561..00000000
--- a/examples/resources/stackitprivatepreview_sqlserverflexbeta_user/resource.tf
+++ /dev/null
@@ -1,12 +0,0 @@
-resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
- project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
- username = "username"
- roles = ["role"]
-}
-
-# Only use the import statement, if you want to import an existing sqlserverflex user
-import {
- to = stackitprivatepreview_sqlserverflexalpha_user.import-example
- id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
-}
\ No newline at end of file
diff --git a/generator/cmd/build/build.go b/generator/cmd/build/build.go
deleted file mode 100644
index f8585bad..00000000
--- a/generator/cmd/build/build.go
+++ /dev/null
@@ -1,346 +0,0 @@
-package build
-
-import (
- "errors"
- "fmt"
- "go/ast"
- "go/parser"
- "go/token"
- "log/slog"
- "os"
- "os/exec"
- "path"
- "regexp"
- "strings"
-)
-
-type Builder struct {
- rootDir string
- SkipClone bool
- SkipCleanup bool
- PackagesOnly bool
- Verbose bool
- Debug bool
-}
-
-func (b *Builder) Build() error {
- slog.Info("Starting Builder")
- if b.PackagesOnly {
- slog.Info(" >>> only generating pkg_gen <<<")
- }
-
- rootErr := b.determineRoot()
- if rootErr != nil {
- return rootErr
- }
-
- if !b.PackagesOnly {
- if b.Verbose {
- slog.Info(" ... Checking needed commands available")
- }
- chkErr := checkCommands([]string{})
- if chkErr != nil {
- return chkErr
- }
- }
-
- // if !b.SkipCleanup {
- // slog.Info("Cleaning up old packages directory")
- // err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
- // if err != nil {
- // return err
- // }
- //}
- //
- // if !b.SkipCleanup && !b.PackagesOnly {
- // slog.Info("Cleaning up old packages directory")
- // err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
- // if err != nil {
- // return err
- // }
- //}
-
- // slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
- // genDir := path.Join(*root, GEN_REPO_NAME)
- // if !b.SkipClone {
- // err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
- // if err != nil {
- // return err
- // }
- //}
-
- oasHandlerErr := b.oasHandler(path.Join(b.rootDir, "service_specs"))
- if oasHandlerErr != nil {
- return oasHandlerErr
- }
-
- // if !b.PackagesOnly {
- // slog.Info("Generating service boilerplate")
- // err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
- // if err != nil {
- // return err
- // }
- //
- // slog.Info("Copying all service files")
- // err = CopyDirectory(
- // path.Join(*root, "generated", "internal", "services"),
- // path.Join(*root, "stackit", "internal", "services"),
- // )
- // if err != nil {
- // return err
- // }
- //
- // err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
- // if err != nil {
- // return err
- // }
- //}
-
- // workaround to remove linter complain :D
- if b.PackagesOnly && b.Verbose && b.SkipClone && b.SkipCleanup {
- bpErr := createBoilerplate(b.rootDir, "boilerplate")
- if bpErr != nil {
- return bpErr
- }
- }
-
- slog.Info("Done")
- return nil
-}
-
-type templateData struct {
- PackageName string
- PackageNameCamel string
- PackageNamePascal string
- NameCamel string
- NamePascal string
- NameSnake string
- Fields []string
-}
-
-func createBoilerplate(rootFolder, folder string) error {
- services, err := os.ReadDir(folder)
- if err != nil {
- return err
- }
- for _, svc := range services {
- if !svc.IsDir() {
- continue
- }
- resources, err := os.ReadDir(path.Join(folder, svc.Name()))
- if err != nil {
- return err
- }
-
- var handleDS bool
- var handleRes bool
- var foundDS bool
- var foundRes bool
-
- for _, res := range resources {
- if !res.IsDir() {
- continue
- }
-
- resourceName := res.Name()
-
- dsFile := path.Join(
- folder,
- svc.Name(),
- res.Name(),
- "datasources_gen",
- fmt.Sprintf("%s_data_source_gen.go", res.Name()),
- )
- handleDS = FileExists(dsFile)
-
- resFile := path.Join(
- folder,
- svc.Name(),
- res.Name(),
- "resources_gen",
- fmt.Sprintf("%s_resource_gen.go", res.Name()),
- )
- handleRes = FileExists(resFile)
-
- dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
- foundDS = FileExists(dsGoFile)
-
- resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
- foundRes = FileExists(resGoFile)
-
- if handleDS && !foundDS {
- slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
- if !ValidateSnakeCase(resourceName) {
- return errors.New("resource name is invalid")
- }
-
- fields, tokenErr := getTokens(dsFile)
- if tokenErr != nil {
- return fmt.Errorf("error reading tokens: %w", tokenErr)
- }
-
- tplName := "data_source_scaffold.gotmpl"
- err = writeTemplateToFile(
- tplName,
- path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
- dsGoFile,
- &templateData{
- PackageName: svc.Name(),
- PackageNameCamel: ToCamelCase(svc.Name()),
- PackageNamePascal: ToPascalCase(svc.Name()),
- NameCamel: ToCamelCase(resourceName),
- NamePascal: ToPascalCase(resourceName),
- NameSnake: resourceName,
- Fields: fields,
- },
- )
- if err != nil {
- panic(err)
- }
- }
-
- if handleRes && !foundRes {
- slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
- if !ValidateSnakeCase(resourceName) {
- return errors.New("resource name is invalid")
- }
-
- fields, tokenErr := getTokens(resFile)
- if tokenErr != nil {
- return fmt.Errorf("error reading tokens: %w", tokenErr)
- }
-
- tplName := "resource_scaffold.gotmpl"
- err = writeTemplateToFile(
- tplName,
- path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
- resGoFile,
- &templateData{
- PackageName: svc.Name(),
- PackageNameCamel: ToCamelCase(svc.Name()),
- PackageNamePascal: ToPascalCase(svc.Name()),
- NameCamel: ToCamelCase(resourceName),
- NamePascal: ToPascalCase(resourceName),
- NameSnake: resourceName,
- Fields: fields,
- },
- )
- if err != nil {
- return err
- }
-
- if !FileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
- slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
- if !ValidateSnakeCase(resourceName) {
- return errors.New("resource name is invalid")
- }
- fncTplName := "functions_scaffold.gotmpl"
- err = writeTemplateToFile(
- fncTplName,
- path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
- path.Join(folder, svc.Name(), res.Name(), "functions.go"),
- &templateData{
- PackageName: svc.Name(),
- PackageNameCamel: ToCamelCase(svc.Name()),
- PackageNamePascal: ToPascalCase(svc.Name()),
- NameCamel: ToCamelCase(resourceName),
- NamePascal: ToPascalCase(resourceName),
- NameSnake: resourceName,
- },
- )
- if err != nil {
- return err
- }
- }
- }
- }
- }
- return nil
-}
-
-func handleLine(line string) (string, error) {
- schemaRegex := regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
-
- schemaMatches := schemaRegex.FindAllStringSubmatch(line, -1)
- if schemaMatches != nil {
- return fmt.Sprintf("%stf_original_api_id%s", schemaMatches[0][1], schemaMatches[0][3]), nil
- }
-
- modelRegex := regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)
- modelMatches := modelRegex.FindAllStringSubmatch(line, -1)
- if modelMatches != nil {
- return fmt.Sprintf("%stf_original_api_id%s", modelMatches[0][1], modelMatches[0][3]), nil
- }
-
- return line, nil
-}
-
-func (b *Builder) determineRoot() error {
- cmd := exec.Command("git", "rev-parse", "--show-toplevel")
- out, err := cmd.Output()
- if err != nil {
- return err
- }
- lines := strings.Split(string(out), "\n")
- if lines[0] == "" {
- return fmt.Errorf("unable to determine root directory from git")
- }
- b.rootDir = lines[0]
- if b.Verbose {
- slog.Info(" ... using root", "dir", b.rootDir)
- }
-
- return nil
-}
-
-// func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
-// if !skipClone {
-// if FileExists(targetDir) {
-// remErr := os.RemoveAll(targetDir)
-// if remErr != nil {
-// return remErr
-// }
-// }
-// _, cloneErr := git.Clone(
-// clone.Repository(repoUrl),
-// clone.Directory(targetDir),
-// )
-// if cloneErr != nil {
-// return cloneErr
-// }
-// }
-// return nil
-//}
-
-func getTokens(fileName string) ([]string, error) {
- fset := token.NewFileSet()
-
- var result []string
-
- node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
- if err != nil {
- return nil, err
- }
-
- ast.Inspect(
- node, func(n ast.Node) bool {
- // Suche nach Typ-Deklarationen (structs)
- ts, ok := n.(*ast.TypeSpec)
- if ok {
- if strings.Contains(ts.Name.Name, "Model") {
- ast.Inspect(
- ts, func(sn ast.Node) bool {
- tts, tok := sn.(*ast.Field)
- if tok {
- result = append(result, tts.Names[0].String())
- }
- return true
- },
- )
- }
- }
- return true
- },
- )
- return result, nil
-}
diff --git a/generator/cmd/build/functions.go b/generator/cmd/build/functions.go
deleted file mode 100644
index 5f609837..00000000
--- a/generator/cmd/build/functions.go
+++ /dev/null
@@ -1,120 +0,0 @@
-package build
-
-import (
- "fmt"
- "log/slog"
- "os"
- "os/exec"
- "strings"
- "text/template"
-)
-
-func FileExists(pathValue string) bool {
- _, err := os.Stat(pathValue)
- if os.IsNotExist(err) {
- return false
- }
- if err != nil {
- panic(err)
- }
- return true
-}
-
-func ucfirst(s string) string {
- if s == "" {
- return ""
- }
- return strings.ToUpper(s[:1]) + s[1:]
-}
-
-func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
- fn := template.FuncMap{
- "ucfirst": ucfirst,
- }
-
- tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
- if err != nil {
- return err
- }
-
- var f *os.File
- f, err = os.Create(outFile)
- if err != nil {
- return err
- }
-
- err = tmpl.Execute(f, *data)
- if err != nil {
- return err
- }
-
- err = f.Close()
- if err != nil {
- return err
- }
- return nil
-}
-
-/* saved for later
-func deleteFiles(fNames ...string) error {
- for _, fName := range fNames {
- if _, err := os.Stat(fName); !os.IsNotExist(err) {
- err = os.Remove(fName)
- if err != nil {
- return err
- }
- }
- }
- return nil
-}
-
-func copyFile(src, dst string) (int64, error) {
- sourceFileStat, err := os.Stat(src)
- if err != nil {
- return 0, err
- }
-
- if !sourceFileStat.Mode().IsRegular() {
- return 0, fmt.Errorf("%s is not a regular file", src)
- }
-
- source, err := os.Open(src)
- if err != nil {
- return 0, err
- }
- defer func(source *os.File) {
- err := source.Close()
- if err != nil {
- slog.Error("copyFile", "err", err)
- }
- }(source)
-
- destination, err := os.Create(dst)
- if err != nil {
- return 0, err
- }
- defer func(destination *os.File) {
- err := destination.Close()
- if err != nil {
- slog.Error("copyFile", "err", err)
- }
- }(destination)
- nBytes, err := io.Copy(destination, source)
- return nBytes, err
-}
-*/
-
-func checkCommands(commands []string) error {
- for _, commandName := range commands {
- if !commandExists(commandName) {
- return fmt.Errorf("missing command %s", commandName)
- }
- slog.Info(" found", "command", commandName)
- }
- return nil
-}
-
-func commandExists(cmd string) bool {
- _, err := exec.LookPath(cmd)
- return err == nil
-}
diff --git a/generator/cmd/build/oas-handler.go b/generator/cmd/build/oas-handler.go
deleted file mode 100644
index d4ab5c4a..00000000
--- a/generator/cmd/build/oas-handler.go
+++ /dev/null
@@ -1,446 +0,0 @@
-package build
-
-import (
- "bufio"
- "bytes"
- "errors"
- "fmt"
- "log"
- "log/slog"
- "os"
- "os/exec"
- "path"
- "regexp"
- "strings"
-
- "gopkg.in/yaml.v3"
-
- "github.com/ldez/go-git-cmd-wrapper/v2/clone"
- "github.com/ldez/go-git-cmd-wrapper/v2/git"
-)
-
-const (
- OasRepoName = "stackit-api-specifications"
- OasRepo = "https://github.com/stackitcloud/stackit-api-specifications.git"
-
- ResTypeResource = "resources"
- ResTypeDataSource = "datasources"
-)
-
-type Data struct {
- ServiceName string `yaml:",omitempty" json:",omitempty"`
- Versions []Version `yaml:"versions" json:"versions"`
-}
-
-type Version struct {
- Name string `yaml:"name" json:"name"`
- Path string `yaml:"path" json:"path"`
-}
-
-var oasTempDir string
-
-func (b *Builder) oasHandler(specDir string) error {
- if b.Verbose {
- slog.Info("creating schema files", "dir", specDir)
- }
- if _, err := os.Stat(specDir); os.IsNotExist(err) {
- return fmt.Errorf("spec files directory does not exist")
- }
-
- err := b.createRepoDir(b.SkipClone)
- if err != nil {
- return fmt.Errorf("%s", err.Error())
- }
-
- err2 := b.handleServices(specDir)
- if err2 != nil {
- return err2
- }
-
- if !b.SkipCleanup {
- if b.Verbose {
- slog.Info("Finally removing temporary files and directories")
- }
- err := os.RemoveAll(path.Join(b.rootDir, "generated"))
- if err != nil {
- slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
- return err
- }
-
- err = os.RemoveAll(oasTempDir)
- if err != nil {
- slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
- return err
- }
- }
-
- return nil
-}
-
-func (b *Builder) handleServices(specDir string) error {
- services, err := os.ReadDir(specDir)
- if err != nil {
- return err
- }
-
- for _, svc := range services {
- if !svc.IsDir() {
- continue
- }
-
- if b.Verbose {
- slog.Info(" ... found", "service", svc.Name())
- }
- var svcVersions Data
- svcVersions.ServiceName = svc.Name()
-
- versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
- if versionsErr != nil {
- return versionsErr
- }
-
- oasSpecErr := b.generateServiceFiles(&svcVersions)
- if oasSpecErr != nil {
- return oasSpecErr
- }
- }
- return nil
-}
-
-func (b *Builder) getServiceVersions(confFile string, data *Data) error {
- if _, cfgFileErr := os.Stat(confFile); os.IsNotExist(cfgFileErr) {
- return fmt.Errorf("config file does not exist")
- }
-
- fileContent, fileErr := os.ReadFile(confFile)
- if fileErr != nil {
- return fileErr
- }
- convErr := yaml.Unmarshal(fileContent, &data)
- if convErr != nil {
- return convErr
- }
-
- return nil
-}
-
-func (b *Builder) createRepoDir(skipClone bool) error {
- tmpDirName, err := os.MkdirTemp("", "oasbuild")
- if err != nil {
- return err
- }
- oasTempDir = path.Join(tmpDirName, OasRepoName)
- slog.Info("Creating oas repo dir", "dir", oasTempDir)
- if !skipClone {
- if FileExists(oasTempDir) {
- slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
- return nil
- }
- out, cloneErr := git.Clone(
- clone.Repository(OasRepo),
- clone.Directory(oasTempDir),
- )
- if cloneErr != nil {
- slog.Error("git clone error", "output", out)
- return cloneErr
- }
- if b.Verbose {
- slog.Info("git clone result", "output", out)
- }
- }
- return nil
-}
-
-func (b *Builder) generateServiceFiles(data *Data) error {
- err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
- if err != nil {
- return err
- }
-
- for _, v := range data.Versions {
- specFiles, specsErr := os.ReadDir(path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name))
- if specsErr != nil {
- return specsErr
- }
- for _, specFile := range specFiles {
- if specFile.IsDir() {
- continue
- }
- r := regexp.MustCompile(`^(.*)_config.yml$`)
- matches := r.FindAllStringSubmatch(specFile.Name(), -1)
- if matches == nil {
- slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
- continue
- }
-
- srcSpecFile := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name, specFile.Name())
-
- if matches[0][0] != specFile.Name() {
- return fmt.Errorf("matched filename differs from original filename - this should not happen")
- }
- resource := matches[0][1]
- if b.Verbose {
- slog.Info(
- " found service spec",
- "service",
- data.ServiceName,
- "resource",
- resource,
- "file",
- specFile.Name(),
- )
- }
-
- oasFile := path.Join(
- oasTempDir,
- "services",
- data.ServiceName,
- v.Path,
- fmt.Sprintf("%s.json", data.ServiceName),
- )
- if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
- slog.Warn(
- " could not find matching oas",
- "svc",
- data.ServiceName,
- "version",
- v.Name,
- )
- continue
- }
-
- // determine correct target service name
- scName := fmt.Sprintf("%s%s", data.ServiceName, v.Name)
- scName = strings.ReplaceAll(scName, "-", "")
-
- specJSONFile := path.Join(
- b.rootDir,
- "generated",
- "specs",
- fmt.Sprintf("%s_%s_spec.json", scName, resource),
- )
-
- cmdErr := b.runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile)
- if cmdErr != nil {
- return cmdErr
- }
-
- cmdResGenErr := b.runTerraformPluginGenFramework(ResTypeResource, scName, resource, specJSONFile)
- if cmdResGenErr != nil {
- return cmdResGenErr
- }
-
- cmdDsGenErr := b.runTerraformPluginGenFramework(ResTypeDataSource, scName, resource, specJSONFile)
- if cmdDsGenErr != nil {
- return cmdDsGenErr
- }
- }
- }
-
- return nil
-}
-
-func (b *Builder) runTerraformPluginGenFramework(resType, svcName, resource, specJSONFile string) error {
- var stdOut, stdErr bytes.Buffer
- tgtFolder := path.Join(
- b.rootDir,
- "stackit",
- "internal",
- "services",
- svcName,
- resource,
- fmt.Sprintf("%s_gen", resType),
- )
-
- //nolint:gosec // this file is not sensitive, so we can use 0755
- err := os.MkdirAll(tgtFolder, 0o755)
- if err != nil {
- return err
- }
-
- var subCmd string
- switch resType {
- case ResTypeResource:
- subCmd = "resources"
- case ResTypeDataSource:
- subCmd = "data-sources"
- default:
- return fmt.Errorf("unknown resource type given: %s", resType)
- }
-
- // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
- cmd := exec.Command(
- "tfplugingen-framework",
- "generate",
- subCmd,
- "--input",
- specJSONFile,
- "--output",
- tgtFolder,
- "--package",
- svcName,
- )
-
- cmd.Stdout = &stdOut
- cmd.Stderr = &stdErr
- if err = cmd.Start(); err != nil {
- slog.Error(fmt.Sprintf("tfplugingen-framework generate %s", resType), "error", err)
- return err
- }
-
- if err = cmd.Wait(); err != nil {
- var exitErr *exec.ExitError
- if errors.As(err, &exitErr) {
- slog.Error(
- fmt.Sprintf("tfplugingen-framework generate %s", resType),
- "code",
- exitErr.ExitCode(),
- "error",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return fmt.Errorf("%s", stdErr.String())
- }
- if err != nil {
- slog.Error(
- fmt.Sprintf("tfplugingen-framework generate %s", resType),
- "err",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return err
- }
- }
-
- if resType == ResTypeDataSource {
- tfAnoErr := b.handleTfTagForDatasourceFile(
- path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
- svcName,
- resource,
- )
- if tfAnoErr != nil {
- return tfAnoErr
- }
- }
-
- return nil
-}
-
-func (b *Builder) runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile string) error {
- var stdOut, stdErr bytes.Buffer
-
- // nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
- cmd := exec.Command(
- "tfplugingen-openapi",
- "generate",
- "--config",
- srcSpecFile,
- "--output",
- specJSONFile,
- oasFile,
- )
- cmd.Stdout = &stdOut
- cmd.Stderr = &stdErr
-
- if err := cmd.Start(); err != nil {
- slog.Error(
- "tfplugingen-openapi generate",
- "error",
- err,
- "stdOut",
- stdOut.String(),
- "stdErr",
- stdErr.String(),
- )
- return err
- }
-
- if err := cmd.Wait(); err != nil {
- var exitErr *exec.ExitError
- if errors.As(err, &exitErr) {
- slog.Error(
- "tfplugingen-openapi generate",
- "code",
- exitErr.ExitCode(),
- "error",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return fmt.Errorf("%s", stdErr.String())
- }
- if err != nil {
- slog.Error(
- "tfplugingen-openapi generate",
- "err",
- err,
- "stdout",
- stdOut.String(),
- "stderr",
- stdErr.String(),
- )
- return err
- }
- }
- if stdOut.Len() > 0 {
- slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
- }
-
- return nil
-}
-
-// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id"
-func (b *Builder) handleTfTagForDatasourceFile(filePath, service, resource string) error {
- if b.Verbose {
- slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
- }
- if !FileExists(filePath) {
- slog.Warn(" could not find file, skipping", "path", filePath)
- return nil
- }
- f, err := os.Open(filePath)
- if err != nil {
- return err
- }
-
- tmp, err := os.CreateTemp(b.rootDir, "replace-*")
- if err != nil {
- return err
- }
-
- sc := bufio.NewScanner(f)
- for sc.Scan() {
- resLine, err := handleLine(sc.Text())
- if err != nil {
- return err
- }
- if _, err := tmp.WriteString(resLine + "\n"); err != nil {
- return err
- }
- }
- if scErr := sc.Err(); scErr != nil {
- return scErr
- }
-
- if err := tmp.Close(); err != nil {
- return err
- }
-
- if err := f.Close(); err != nil {
- return err
- }
-
- //nolint:gosec // path traversal is not a concern here
- if err := os.Rename(tmp.Name(), filePath); err != nil {
- log.Fatal(err)
- }
- return nil
-}
diff --git a/generator/cmd/build/templates/data_source_scaffold.gotmpl b/generator/cmd/build/templates/data_source_scaffold.gotmpl
deleted file mode 100644
index ba4e8095..00000000
--- a/generator/cmd/build/templates/data_source_scaffold.gotmpl
+++ /dev/null
@@ -1,148 +0,0 @@
-package {{.PackageName}}
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- {{.PackageName}}Pkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
-
- {{.PackageName}}Gen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/datasources_gen"
-)
-
-var _ datasource.DataSource = (*{{.NameCamel}}DataSource)(nil)
-
-const errorPrefix = "[{{.PackageNamePascal}} - {{.NamePascal}}]"
-
-func New{{.NamePascal}}DataSource() datasource.DataSource {
- return &{{.NameCamel}}DataSource{}
-}
-
-type dsModel struct {
- {{.PackageName}}Gen.{{.NamePascal}}Model
- TfId types.String `tfsdk:"id"`
-}
-
-type {{.NameCamel}}DataSource struct{
- client *{{.PackageName}}Pkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *{{.NameCamel}}DataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}"
-}
-
-func (d *{{.NameCamel}}DataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = {{.PackageName}}Gen.{{.NamePascal}}DataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *{{.NameCamel}}DataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.{{.PackageNamePascal}}CustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.{{.PackageNamePascal}}CustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := {{.PackageName}}Pkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *{{.NameCamel}}DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dsModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- {{.NameCamel}}Id := data.{{.NamePascal}}Id.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: implement needed fields
- ctx = tflog.SetField(ctx, "{{.NameCamel}}_id", {{.NameCamel}}Id)
-
- // TODO: refactor to correct implementation
- {{.NameCamel}}Resp, err := d.client.Get{{.NamePascal}}Request(ctx, projectId, region, {{.NameCamel}}Id).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading {{.NameCamel}}",
- fmt.Sprintf("{{.NameCamel}} with ID %q does not exist in project %q.", {{.NameCamel}}Id, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
-
- data.TfId = utils.BuildInternalTerraformId(projectId, region, ..)
-
- // TODO: fill remaining fields
-{{- range .Fields }}
- // data.{{.}} = types.Sometype(apiResponse.Get{{.}}())
-{{- end -}}
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix))
-}
diff --git a/generator/cmd/build/templates/functions_scaffold.gotmpl b/generator/cmd/build/templates/functions_scaffold.gotmpl
deleted file mode 100644
index de4d2dbe..00000000
--- a/generator/cmd/build/templates/functions_scaffold.gotmpl
+++ /dev/null
@@ -1,98 +0,0 @@
-package {{.PackageName}}
-
-import (
- "context"
- "fmt"
- "math"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
-
- {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
- {{.PackageName}}ResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/instance/resources_gen"
-)
-
-func mapResponseToModel(
- ctx context.Context,
- resp *{{.PackageName}}.Get{{.NamePascal}}Response,
- m *{{.PackageName}}ResGen.{{.NamePascal}}Model,
- tfDiags diag.Diagnostics,
-) error {
- // TODO: complete and refactor
- m.Id = types.StringValue(resp.GetId())
-
- /*
- sampleList, diags := types.ListValueFrom(ctx, types.StringType, resp.GetList())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting list response value",
- )
- }
- sample, diags := {{.PackageName}}ResGen.NewSampleValue(
- {{.PackageName}}ResGen.SampleValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "field": types.StringValue(string(resp.GetField())),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting sample response value",
- "sample",
- types.StringValue(string(resp.GetField())),
- )
- }
- m.Sample = sample
- */
- return nil
-}
-
-func handleEncryption(
- m *{{.PackageName}}ResGen.{{.NamePascal}}Model,
- resp *{{.PackageName}}.Get{{.NamePascal}}Response,
-) {{.PackageName}}ResGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == nil ||
- resp.Encryption.KekKeyRingId == nil ||
- resp.Encryption.KekKeyVersion == nil ||
- resp.Encryption.ServiceAccount == nil {
-
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return {{.PackageName}}ResGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := {{.PackageName}}ResGen.NewEncryptionValueNull()
- if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- enc.KekKeyId = types.StringValue(kVal)
- }
- if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- enc.KekKeyRingId = types.StringValue(kkVal)
- }
- if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- enc.KekKeyVersion = types.StringValue(kkvVal)
- }
- if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
- enc.ServiceAccount = types.StringValue(sa)
- }
- return enc
-}
-
-func toCreatePayload(
- ctx context.Context,
- model *{{.PackageName}}ResGen.{{.NamePascal}}Model,
-) (*{{.PackageName}}.Create{{.NamePascal}}RequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &{{.PackageName}}.Create{{.NamePascal}}RequestPayload{
- // TODO: fill fields
- }, nil
-}
diff --git a/generator/cmd/build/templates/resource_scaffold.gotmpl b/generator/cmd/build/templates/resource_scaffold.gotmpl
deleted file mode 100644
index 3fafc10c..00000000
--- a/generator/cmd/build/templates/resource_scaffold.gotmpl
+++ /dev/null
@@ -1,429 +0,0 @@
-package {{.PackageName}}
-
-import (
- "context"
- _ "embed"
- "fmt"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- {{.PackageName}}ResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/{{.PackageName}}/{{.NameSnake}}/resources_gen"
-)
-
-var (
- _ resource.Resource = &{{.NameCamel}}Resource{}
- _ resource.ResourceWithConfigure = &{{.NameCamel}}Resource{}
- _ resource.ResourceWithImportState = &{{.NameCamel}}Resource{}
- _ resource.ResourceWithModifyPlan = &{{.NameCamel}}Resource{}
- _ resource.ResourceWithIdentity = &{{.NameCamel}}Resource{}
-)
-
-func New{{.NamePascal}}Resource() resource.Resource {
- return &{{.NameCamel}}Resource{}
-}
-
-type {{.NameCamel}}Resource struct{
- client *{{.PackageName}}.APIClient
- providerData core.ProviderData
-}
-
-// resourceModel represents the Terraform resource state
-type resourceModel = {{.PackageName}}.{{.NamePascal}}Model
-
-type {{.NamePascal}}ResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- // TODO: implement further needed parts
- {{.NamePascal}}ID types.String `tfsdk:"{{.NameSnake}}_id"`
-}
-
-// Metadata defines terraform resource name
-func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-// Schema loads the schema from generated files and adds plan modifiers
-func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx)
-
- fields, err := {{.PackageName}}Utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = {{.PackageName}}Utils.AddPlanModifiersToResourceSchema(fields, &schema)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = schema
-}
-
-// IdentitySchema defines the identity schema
-func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- // TODO: implement remaining schema parts
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *{{.NameCamel}}Resource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.{{.PackageNamePascal}}CustomEndpoint != "" {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.{{.PackageName}}CustomEndpoint))
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *{{.NameCamel}}Resource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
-
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- var configModel {{.PackageName}}ResGen.{{.NamePascal}}Model
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- if req.Plan.Raw.IsNull() {
- return
- }
- var planModel {{.PackageName}}ResGen.{{.NamePascal}}Model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-// Create creates a new resource
-func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data {{.PackageName}}ResGen.{{.NamePascal}}Model
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- // TODO: add remaining fields
-
- // TODO: Create API call logic
- /*
- // Generate API request body from model
- payload, err := toCreatePayload(ctx, &model)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating {{.NamePascal}}",
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
- // Create new {{.NamePascal}}
- createResp, err := r.client.Create{{.NamePascal}}Request(
- ctx,
- projectId,
- region,
- ).Create{{.NamePascal}}RequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating {{.NamePascal}}", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- {{.NamePascal}}Id := *createResp.Id
- */
-
- // Example data value setting
- data.{{.NameCamel | ucfirst}}Id = types.StringValue("id-from-response")
-
- // TODO: Set data returned by API in identity
- identity := {{.NamePascal}}ResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- // TODO: add missing values
- {{.NamePascal}}ID: types.StringValue({{.NamePascal}}Id),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // TODO: implement wait handler if needed
- /*
-
- waitResp, err := wait.Create{{.NamePascal}}WaitHandler(
- ctx,
- r.client,
- projectId,
- {{.NamePascal}}Id,
- region,
- ).SetSleepBeforeWait(
- 30 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating {{.NamePascal}}",
- fmt.Sprintf("{{.NamePascal}} creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating {{.NamePascal}}",
- "{{.NamePascal}} creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &model, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating {{.NamePascal}}",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- */
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} created")
-}
-
-func (r *{{.NameCamel}}Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data {{.PackageName}}ResGen.{{.NamePascal}}Model
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData {{.NamePascal}}ResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: Read API call logic
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- // TODO: Set data returned by API in identity
- identity := {{.NamePascal}}ResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- // InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} read")
-}
-
-func (r *{{.NameCamel}}Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data {{.PackageName}}ResGen.{{.NamePascal}}Model
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: Update API call logic
-
- // TODO: Set data returned by API in identity
- identity := {{.NamePascal}}ResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- // TODO: add missing values
- {{.NamePascal}}ID: types.StringValue({{.NamePascal}}Id),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} updated")
-}
-
-func (r *{{.NameCamel}}Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data {{.PackageName}}ResGen.{{.NamePascal}}Model
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData {{.NamePascal}}ResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: Delete API call logic
-
- tflog.Info(ctx, "{{.PackageName}}.{{.NamePascal}} deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *{{.NameCamel}}Resource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- idParts := strings.Split(req.ID, core.Separator)
-
- // TODO: Import logic
- // TODO: fix len and parts itself
- if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],..., got %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- // ... more ...
-
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "{{.PackageName | ucfirst}} database imported with empty password",
- "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
- )
- tflog.Info(ctx, "{{.PackageName | ucfirst}} {{.NameCamel}} state imported")
-}
diff --git a/generator/cmd/build/templates/util.gotmpl b/generator/cmd/build/templates/util.gotmpl
deleted file mode 100644
index cecc8e9e..00000000
--- a/generator/cmd/build/templates/util.gotmpl
+++ /dev/null
@@ -1,47 +0,0 @@
-package utils
-
-import (
- "context"
- "fmt"
-
- {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-func ConfigureClient(
- ctx context.Context,
- providerData *core.ProviderData,
- diags *diag.Diagnostics,
-) *{{.PackageName}}.APIClient {
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(providerData.RoundTripper),
- utils.UserAgentConfigOption(providerData.Version),
- }
- if providerData.{{.PackageName}}CustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(providerData.{{.PackageName}}CustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion()))
- }
- apiClient, err := {{.PackageName}}.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- core.LogAndAddError(
- ctx,
- diags,
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return nil
- }
-
- return apiClient
-}
diff --git a/generator/cmd/build/templates/util_test.gotmpl b/generator/cmd/build/templates/util_test.gotmpl
deleted file mode 100644
index 567f2623..00000000
--- a/generator/cmd/build/templates/util_test.gotmpl
+++ /dev/null
@@ -1,97 +0,0 @@
-package utils
-
-import (
- "context"
- "os"
- "reflect"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- {{.PackageName}} "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/{{.PackageName}}"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-const (
- testVersion = "1.2.3"
- testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud"
-)
-
-func TestConfigureClient(t *testing.T) {
- /* mock authentication by setting service account token env variable */
- os.Clearenv()
- err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val")
- if err != nil {
- t.Errorf("error setting env variable: %v", err)
- }
-
- type args struct {
- providerData *core.ProviderData
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- expected *sqlserverflex.APIClient
- }{
- {
- name: "default endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- },
- },
- expected: func() *sqlserverflex.APIClient {
- apiClient, err := sqlserverflex.NewAPIClient(
- config.WithRegion("eu01"),
- utils.UserAgentConfigOption(testVersion),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- {
- name: "custom endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- SQLServerFlexCustomEndpoint: testCustomEndpoint,
- },
- },
- expected: func() *sqlserverflex.APIClient {
- apiClient, err := sqlserverflex.NewAPIClient(
- utils.UserAgentConfigOption(testVersion),
- config.WithEndpoint(testCustomEndpoint),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- ctx := context.Background()
- diags := diag.Diagnostics{}
-
- actual := ConfigureClient(ctx, tt.args.providerData, &diags)
- if diags.HasError() != tt.wantErr {
- t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr)
- }
-
- if !reflect.DeepEqual(actual, tt.expected) {
- t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected)
- }
- },
- )
- }
-}
diff --git a/generator/cmd/buildCmd.go b/generator/cmd/buildCmd.go
deleted file mode 100644
index 4e1e3189..00000000
--- a/generator/cmd/buildCmd.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package cmd
-
-import (
- "github.com/spf13/cobra"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/build"
-)
-
-var (
- skipCleanup bool
- skipClone bool
- packagesOnly bool
- verbose bool
- debug bool
-)
-
-var buildCmd = &cobra.Command{
- Use: "build",
- Short: "Build the necessary boilerplate",
- Long: `...`,
- RunE: func(_ *cobra.Command, _ []string) error {
- b := build.Builder{
- SkipClone: skipClone,
- SkipCleanup: skipCleanup,
- PackagesOnly: packagesOnly,
- Verbose: verbose,
- Debug: debug,
- }
- return b.Build()
- },
-}
-
-func NewBuildCmd() *cobra.Command {
- return buildCmd
-}
-
-func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands
- buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
- buildCmd.Flags().BoolVarP(&debug, "debug", "d", false, "Enable debug output")
- buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
- buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
- buildCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "verbose - show more logs")
-}
diff --git a/generator/cmd/examplesCmd.go b/generator/cmd/examplesCmd.go
deleted file mode 100644
index a4c75962..00000000
--- a/generator/cmd/examplesCmd.go
+++ /dev/null
@@ -1,114 +0,0 @@
-package cmd
-
-import (
- "fmt"
- "os"
- "path"
-
- "github.com/spf13/cobra"
-)
-
-var examplesCmd = &cobra.Command{
- Use: "examples",
- Short: "create examples",
- Long: `...`,
- RunE: func(_ *cobra.Command, _ []string) error {
- // filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
- //
- // src, err := os.ReadFile(filePathStr)
- // if err != nil {
- // return err
- //}
- //
- // i := interp.New(
- // interp.Options{
- // GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages",
- // BuildTags: nil,
- // Stdin: nil,
- // Stdout: nil,
- // Stderr: nil,
- // Args: nil,
- // Env: nil,
- // SourcecodeFilesystem: nil,
- // Unrestricted: false,
- // },
- //)
- // err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators"))
- // if err != nil {
- // return err
- //}
- // err = i.Use(stdlib.Symbols)
- // if err != nil {
- // return err
- //}
- // _, err = i.Eval(string(src))
- // if err != nil {
- // return err
- //}
- //
- // v, err := i.Eval("DatabaseDataSourceSchema")
- // if err != nil {
- // return err
- //}
- //
- // bar := v.Interface().(func(string) string)
- //
- // r := bar("Kung")
- // println(r)
- //
- // evalPath, err := i.EvalPath(filePathStr)
- // if err != nil {
- // return err
- //}
- //
- // fmt.Printf("%+v\n", evalPath)
-
- // _, err = i.Eval(`import "fmt"`)
- // if err != nil {
- // return err
- //}
- // _, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`)
- // if err != nil {
- // return err
- //}
-
- // v = i.Symbols("Hallo")
-
- // fmt.Println(v)
- return workServices()
- },
-}
-
-func workServices() error {
- startPath := path.Join("stackit", "internal", "services")
-
- services, err := os.ReadDir(startPath)
- if err != nil {
- return err
- }
-
- for _, entry := range services {
- if !entry.IsDir() {
- continue
- }
- resources, err := os.ReadDir(path.Join(startPath, entry.Name()))
- if err != nil {
- return err
- }
- for _, res := range resources {
- if !res.IsDir() {
- continue
- }
- fmt.Println("Gefunden:", startPath, "subdir", entry.Name(), "resource", res.Name())
- }
- }
- return nil
-}
-
-func NewExamplesCmd() *cobra.Command {
- return examplesCmd
-}
-
-// func init() { // nolint: gochecknoinits
-// examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example")
-//}
diff --git a/generator/cmd/getFieldsCmd.go b/generator/cmd/getFieldsCmd.go
deleted file mode 100644
index 06fe9e66..00000000
--- a/generator/cmd/getFieldsCmd.go
+++ /dev/null
@@ -1,148 +0,0 @@
-package cmd
-
-import (
- "fmt"
- "go/ast"
- "go/parser"
- "go/token"
- "path"
- "path/filepath"
- "strings"
-
- "github.com/spf13/cobra"
-)
-
-var (
- inFile string
- svcName string
- resName string
- resType string
- filePath string
-)
-
-var getFieldsCmd = &cobra.Command{
- Use: "get-fields",
- Short: "get fields from file",
- Long: `...`,
- PreRunE: func(_ *cobra.Command, _ []string) error {
- typeStr := "data_source"
- if resType != "resource" && resType != "datasource" {
- return fmt.Errorf("--type can only be resource or datasource")
- }
-
- if resType == "resource" {
- typeStr = resType
- }
-
- if inFile == "" && svcName == "" && resName == "" {
- return fmt.Errorf("--infile or --service and --resource must be provided")
- }
-
- if inFile != "" {
- if svcName != "" || resName != "" {
- return fmt.Errorf("--infile is provided and excludes --service and --resource")
- }
- p, err := filepath.Abs(inFile)
- if err != nil {
- return err
- }
- filePath = p
- return nil
- }
-
- if svcName != "" && resName == "" {
- return fmt.Errorf("if --service is provided, you MUST also provide --resource")
- }
-
- if svcName == "" && resName != "" {
- return fmt.Errorf("if --resource is provided, you MUST also provide --service")
- }
-
- p, err := filepath.Abs(
- path.Join(
- "stackit",
- "internal",
- "services",
- svcName,
- resName,
- fmt.Sprintf("%ss_gen", resType),
- fmt.Sprintf("%s_%s_gen.go", resName, typeStr),
- ),
- )
- if err != nil {
- return err
- }
- filePath = p
-
- //// Enum check
- // switch format {
- // case "json", "yaml":
- //default:
- // return fmt.Errorf("invalid --format: %s (want json|yaml)", format)
- //}
- return nil
- },
- RunE: func(_ *cobra.Command, _ []string) error {
- return getFields(filePath)
- },
-}
-
-func getFields(f string) error {
- tokens, err := getTokens(f)
- if err != nil {
- return err
- }
- for _, item := range tokens {
- fmt.Printf("%s \n", item)
- }
- return nil
-}
-
-func getTokens(fileName string) ([]string, error) {
- fset := token.NewFileSet()
- var result []string
-
- node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
- if err != nil {
- return nil, err
- }
-
- ast.Inspect(
- node, func(n ast.Node) bool {
- // Suche nach Typ-Deklarationen (structs)
- ts, ok := n.(*ast.TypeSpec)
- if ok {
- if strings.Contains(ts.Name.Name, "Model") {
- ast.Inspect(
- ts, func(sn ast.Node) bool {
- tts, tok := sn.(*ast.Field)
- if tok {
- result = append(result, tts.Names[0].String())
- }
- return true
- },
- )
- }
- }
- return true
- },
- )
- return result, nil
-}
-
-func NewGetFieldsCmd() *cobra.Command {
- return getFieldsCmd
-}
-
-func init() { //nolint:gochecknoinits //this is the only way to add the command to the rootCmd
- getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path")
- getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name")
- getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name")
- getFieldsCmd.Flags().StringVarP(
- &resType,
- "type",
- "t",
- "resource",
- "resource type (data-source or resource [default])",
- )
-}
diff --git a/generator/cmd/publish/templates/Caddyfile b/generator/cmd/publish/templates/Caddyfile
deleted file mode 100644
index 5663fbf8..00000000
--- a/generator/cmd/publish/templates/Caddyfile
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- log {
- level debug
- }
-
-
- filesystem tf s3 {
- bucket "terraform-provider-privatepreview"
- region eu01
- endpoint https://object.storage.eu01.onstackit.cloud
- use_path_style
- }
-}
-
-tfregistry.sysops.stackit.rocks {
- encode zstd gzip
-
- handle_path /docs/* {
- root /srv/www
- templates
-
- @md {
- file {path}
- path *.md
- }
-
- rewrite @md /markdown.html
-
- file_server {
- browse
- }
- }
-
- file_server {
- fs tf
- browse
- }
-}
diff --git a/generator/cmd/publish/templates/index.html.gompl b/generator/cmd/publish/templates/index.html.gompl
deleted file mode 100644
index 531032fe..00000000
--- a/generator/cmd/publish/templates/index.html.gompl
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
- Forwarding | Weiterleitung
-
-
-
-Falls Sie nicht automatisch weitergeleitet werden, klicken Sie bitte hier.
-Sie gelangen dann auf unsere Hauptseite
-
-
diff --git a/generator/cmd/publish/templates/index.md.gompl b/generator/cmd/publish/templates/index.md.gompl
deleted file mode 100644
index 3ebaa0e1..00000000
--- a/generator/cmd/publish/templates/index.md.gompl
+++ /dev/null
@@ -1,34 +0,0 @@
----
-page_title: STACKIT provider PrivatePreview
-description: none
----
-
-# provider
-[Provider](docs/index.md)
-
-## PostGreSQL alpha
-### data sources
-
-- [Flavor](docs/data-sources/postgresflexalpha_flavor.md)
-- [Database](docs/data-sources/postgresflexalpha_database.md)
-- [Instance](docs/data-sources/postgresflexalpha_instance.md)
-- [Flavors](docs/data-sources/postgresflexalpha_flavors.md)
-- [User](docs/data-sources/postgresflexalpha_user.md)
-
-### resources
-- [Database](docs/resources/postgresflexalpha_database.md)
-- [Instance](docs/resources/postgresflexalpha_instance.md)
-- [User](docs/resources/postgresflexalpha_user.md)
-
-## SQL Server alpha
-### data sources
-- [Database](docs/data-sources/sqlserverflexalpha_database.md)
-- [Version](docs/data-sources/sqlserverflexalpha_version.md)
-- [User](docs/data-sources/sqlserverflexalpha_user.md)
-- [Flavor](docs/data-sources/sqlserverflexalpha_flavor.md)
-- [Instance](docs/data-sources/sqlserverflexalpha_instance.md)
-
-### resources
-- [Database](docs/resources/sqlserverflexalpha_database.md)
-- [User](docs/resources/sqlserverflexalpha_user.md)
-- [Instance](docs/resources/sqlserverflexalpha_instance.md)
diff --git a/generator/cmd/publish/templates/markdown.html.gompl b/generator/cmd/publish/templates/markdown.html.gompl
deleted file mode 100644
index d338b241..00000000
--- a/generator/cmd/publish/templates/markdown.html.gompl
+++ /dev/null
@@ -1,79 +0,0 @@
-
-{{ $mdFile := .OriginalReq.URL.Path | trimPrefix "/docs" }}
-{{ $md := (include $mdFile | splitFrontMatter) }}
-
-
- {{$md.Meta.page_title}}
-
-
-
-
-{{$md.Meta.page_title}}
-
-
-
-
-
-
-
-
-
-
- {{markdown $md.Body}}
-
-
-
-
-
-
-
-
-
-
diff --git a/generator/main.go b/generator/main.go
deleted file mode 100644
index 44e11c23..00000000
--- a/generator/main.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package main
-
-import (
- "log"
- "log/slog"
- "os"
-
- "github.com/SladkyCitron/slogcolor"
- cc "github.com/ivanpirog/coloredcobra"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd"
-)
-
-func main() {
- slog.SetDefault(slog.New(slogcolor.NewHandler(os.Stderr, slogcolor.DefaultOptions)))
-
- rootCmd := cmd.NewRootCmd()
-
- cc.Init(&cc.Config{
- RootCmd: rootCmd,
- Headings: cc.HiCyan + cc.Bold + cc.Underline,
- Commands: cc.HiYellow + cc.Bold,
- Example: cc.Italic,
- ExecName: cc.Bold,
- Flags: cc.Bold,
- })
- rootCmd.SetOut(os.Stdout)
-
- rootCmd.AddCommand(
- cmd.NewBuildCmd(),
- cmd.NewPublishCmd(),
- cmd.NewGetFieldsCmd(),
- cmd.NewExamplesCmd(),
- )
-
- err := rootCmd.Execute()
- if err != nil {
- log.Fatal(err)
- }
-}
diff --git a/go.mod b/go.mod
index 4a7ad690..d827c584 100644
--- a/go.mod
+++ b/go.mod
@@ -3,286 +3,82 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
go 1.25.6
require (
- github.com/SladkyCitron/slogcolor v1.8.0
- github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/go-cmp v0.7.0
github.com/google/uuid v1.6.0
- github.com/hashicorp/terraform-plugin-framework v1.18.0
+ github.com/hashicorp/terraform-plugin-framework v1.17.0
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
- github.com/hashicorp/terraform-plugin-go v0.30.0
+ github.com/hashicorp/terraform-plugin-go v0.29.0
github.com/hashicorp/terraform-plugin-log v0.10.0
github.com/hashicorp/terraform-plugin-testing v1.14.0
github.com/iancoleman/strcase v0.3.0
- github.com/ivanpirog/coloredcobra v1.0.1
- github.com/jarcoal/httpmock v1.4.1
- github.com/joho/godotenv v1.5.1
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
github.com/spf13/cobra v1.10.2
- github.com/stackitcloud/stackit-sdk-go/core v0.22.0
- github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0
- github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0
+ github.com/stackitcloud/stackit-sdk-go/core v0.21.0
+ github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
+ github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1
github.com/teambition/rrule-go v1.8.2
gopkg.in/yaml.v3 v3.0.1
)
-require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
+require (
+ github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
+ golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 // indirect
+)
require (
- 4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
- 4d63.com/gochecknoglobals v0.2.2 // indirect
- codeberg.org/chavacava/garif v0.2.0 // indirect
- codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
dario.cat/mergo v1.0.1 // indirect
- dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect
- dev.gaijin.team/go/golib v0.6.0 // indirect
- github.com/4meepo/tagalign v1.4.3 // indirect
- github.com/Abirdcfly/dupword v0.1.7 // indirect
- github.com/AdminBenni/iota-mixing v1.0.0 // indirect
- github.com/AlwxSin/noinlineerr v1.0.5 // indirect
- github.com/Antonboom/errname v1.1.1 // indirect
- github.com/Antonboom/nilnil v1.1.1 // indirect
- github.com/Antonboom/testifylint v1.6.4 // indirect
- github.com/BurntSushi/toml v1.6.0 // indirect
- github.com/Djarvur/go-err113 v0.1.1 // indirect
- github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
- github.com/Masterminds/goutils v1.1.1 // indirect
- github.com/Masterminds/semver/v3 v3.4.0 // indirect
- github.com/Masterminds/sprig/v3 v3.2.3 // indirect
- github.com/MirrexOne/unqueryvet v1.5.4 // indirect
- github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
- github.com/ProtonMail/go-crypto v1.4.0 // indirect
+ github.com/ProtonMail/go-crypto v1.3.0 // indirect
github.com/agext/levenshtein v1.2.3 // indirect
- github.com/alecthomas/chroma/v2 v2.23.1 // indirect
- github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
- github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
- github.com/alexkohler/prealloc v1.1.0 // indirect
- github.com/alfatraining/structtag v1.0.0 // indirect
- github.com/alingse/asasalint v0.0.11 // indirect
- github.com/alingse/nilnesserr v0.2.0 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
- github.com/armon/go-radix v1.0.0 // indirect
- github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
- github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
- github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
- github.com/beorn7/perks v1.0.1 // indirect
- github.com/bgentry/speakeasy v0.1.0 // indirect
- github.com/bkielbasa/cyclop v1.2.3 // indirect
- github.com/blizzy78/varnamelen v0.8.0 // indirect
- github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
- github.com/bombsimon/wsl/v4 v4.7.0 // indirect
- github.com/bombsimon/wsl/v5 v5.6.0 // indirect
- github.com/breml/bidichk v0.3.3 // indirect
- github.com/breml/errchkjson v0.4.1 // indirect
- github.com/butuzov/ireturn v0.4.0 // indirect
- github.com/butuzov/mirror v1.3.0 // indirect
- github.com/catenacyber/perfsprint v0.10.1 // indirect
- github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
- github.com/cespare/xxhash/v2 v2.3.0 // indirect
- github.com/charithe/durationcheck v0.0.11 // indirect
- github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
- github.com/charmbracelet/lipgloss v1.1.0 // indirect
- github.com/charmbracelet/x/ansi v0.10.1 // indirect
- github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
- github.com/charmbracelet/x/term v0.2.1 // indirect
- github.com/ckaznocha/intrange v0.3.1 // indirect
- github.com/cloudflare/circl v1.6.3 // indirect
- github.com/curioswitch/go-reassign v0.3.0 // indirect
- github.com/daixiang0/gci v0.13.7 // indirect
- github.com/dave/dst v0.27.3 // indirect
- github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
- github.com/denis-tingaikin/go-header v0.5.0 // indirect
- github.com/dlclark/regexp2 v1.11.5 // indirect
- github.com/ettle/strcase v0.2.0 // indirect
+ github.com/cloudflare/circl v1.6.2 // indirect
github.com/fatih/color v1.18.0 // indirect
- github.com/fatih/structtag v1.2.0 // indirect
- github.com/firefart/nonamedreturns v1.0.6 // indirect
- github.com/fsnotify/fsnotify v1.5.4 // indirect
- github.com/fzipp/gocyclo v0.6.0 // indirect
- github.com/ghostiam/protogetter v0.3.20 // indirect
- github.com/go-critic/go-critic v0.14.3 // indirect
- github.com/go-toolsmith/astcast v1.1.0 // indirect
- github.com/go-toolsmith/astcopy v1.1.0 // indirect
- github.com/go-toolsmith/astequal v1.2.0 // indirect
- github.com/go-toolsmith/astfmt v1.1.0 // indirect
- github.com/go-toolsmith/astp v1.1.0 // indirect
- github.com/go-toolsmith/strparse v1.1.0 // indirect
- github.com/go-toolsmith/typep v1.1.0 // indirect
- github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
- github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
- github.com/gobwas/glob v0.2.3 // indirect
- github.com/godoc-lint/godoc-lint v0.11.2 // indirect
- github.com/gofrs/flock v0.13.0 // indirect
+ github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
github.com/golang/protobuf v1.5.4 // indirect
- github.com/golangci/asciicheck v0.5.0 // indirect
- github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
- github.com/golangci/go-printf-func-name v0.1.1 // indirect
- github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
- github.com/golangci/golangci-lint/v2 v2.11.2 // indirect
- github.com/golangci/golines v0.15.0 // indirect
- github.com/golangci/misspell v0.8.0 // indirect
- github.com/golangci/plugin-module-register v0.1.2 // indirect
- github.com/golangci/revgrep v0.8.0 // indirect
- github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
- github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
- github.com/gordonklaus/ineffassign v0.2.0 // indirect
- github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
- github.com/gostaticanalysis/comment v1.5.0 // indirect
- github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
- github.com/gostaticanalysis/nilerr v0.1.2 // indirect
- github.com/hashicorp/cli v1.1.7 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-cty v1.5.0 // indirect
github.com/hashicorp/go-hclog v1.6.3 // indirect
- github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-plugin v1.7.0 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.8.0 // indirect
- github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
- github.com/hashicorp/hc-install v0.9.3 // indirect
- github.com/hashicorp/hcl v1.0.0 // indirect
+ github.com/hashicorp/hc-install v0.9.2 // indirect
github.com/hashicorp/hcl/v2 v2.24.0 // indirect
github.com/hashicorp/logutils v1.0.0 // indirect
- github.com/hashicorp/terraform-exec v0.25.0 // indirect
+ github.com/hashicorp/terraform-exec v0.24.0 // indirect
github.com/hashicorp/terraform-json v0.27.2 // indirect
- github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect
- github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 // indirect
+ github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
- github.com/hashicorp/terraform-svchost v0.2.1 // indirect
+ github.com/hashicorp/terraform-svchost v0.2.0 // indirect
github.com/hashicorp/yamux v0.1.2 // indirect
- github.com/hexops/gotextdiff v1.0.3 // indirect
- github.com/huandu/xstrings v1.3.3 // indirect
- github.com/imdario/mergo v0.3.15 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
- github.com/jgautheron/goconst v1.8.2 // indirect
- github.com/jingyugao/rowserrcheck v1.1.1 // indirect
- github.com/jjti/go-spancheck v0.6.5 // indirect
- github.com/julz/importas v0.2.0 // indirect
- github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
- github.com/kisielk/errcheck v1.10.0 // indirect
- github.com/kkHAIKE/contextcheck v1.1.6 // indirect
github.com/kr/text v0.2.0 // indirect
- github.com/kulti/thelper v0.7.1 // indirect
- github.com/kunwardeep/paralleltest v1.0.15 // indirect
- github.com/lasiar/canonicalheader v1.1.2 // indirect
- github.com/ldez/exptostd v0.4.5 // indirect
- github.com/ldez/gomoddirectives v0.8.0 // indirect
- github.com/ldez/grignotin v0.10.1 // indirect
- github.com/ldez/structtags v0.6.1 // indirect
- github.com/ldez/tagliatelle v0.7.2 // indirect
- github.com/ldez/usetesting v0.5.0 // indirect
- github.com/leonklingele/grouper v1.1.2 // indirect
- github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
- github.com/macabu/inamedparam v0.2.0 // indirect
- github.com/magiconair/properties v1.8.6 // indirect
- github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
- github.com/manuelarte/funcorder v0.5.0 // indirect
- github.com/maratori/testableexamples v1.0.1 // indirect
- github.com/maratori/testpackage v1.1.2 // indirect
- github.com/matoous/godox v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mattn/go-runewidth v0.0.16 // indirect
- github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
- github.com/mgechev/revive v1.15.0 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
- github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
- github.com/moricho/tparallel v0.3.2 // indirect
- github.com/muesli/termenv v0.16.0 // indirect
- github.com/nakabonne/nestif v0.3.1 // indirect
- github.com/nishanths/exhaustive v0.12.0 // indirect
- github.com/nishanths/predeclared v0.2.2 // indirect
- github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
github.com/oklog/run v1.2.0 // indirect
- github.com/pelletier/go-toml v1.9.5 // indirect
- github.com/pelletier/go-toml/v2 v2.2.4 // indirect
- github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
- github.com/posener/complete v1.2.3 // indirect
- github.com/prometheus/client_golang v1.12.1 // indirect
- github.com/prometheus/client_model v0.2.0 // indirect
- github.com/prometheus/common v0.32.1 // indirect
- github.com/prometheus/procfs v0.7.3 // indirect
- github.com/quasilyte/go-ruleguard v0.4.5 // indirect
- github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
- github.com/quasilyte/gogrep v0.5.0 // indirect
- github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
- github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
- github.com/raeperd/recvcheck v0.2.0 // indirect
- github.com/rivo/uniseg v0.4.7 // indirect
- github.com/rogpeppe/go-internal v1.14.1 // indirect
- github.com/ryancurrah/gomodguard v1.4.1 // indirect
- github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
- github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
- github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
- github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
- github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
- github.com/securego/gosec/v2 v2.24.7 // indirect
- github.com/shopspring/decimal v1.3.1 // indirect
- github.com/sirupsen/logrus v1.9.4 // indirect
- github.com/sivchari/containedctx v1.0.3 // indirect
- github.com/sonatard/noctx v0.5.0 // indirect
- github.com/sourcegraph/go-diff v0.7.0 // indirect
- github.com/spf13/afero v1.15.0 // indirect
- github.com/spf13/cast v1.5.0 // indirect
- github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
- github.com/spf13/viper v1.12.0 // indirect
- github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
- github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
- github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.11.1 // indirect
- github.com/subosito/gotenv v1.4.1 // indirect
- github.com/tetafro/godot v1.5.4 // indirect
- github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
- github.com/timonwong/loggercheck v0.11.0 // indirect
- github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
- github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
- github.com/ultraware/funlen v0.2.0 // indirect
- github.com/ultraware/whitespace v0.2.0 // indirect
- github.com/uudashr/gocognit v1.2.1 // indirect
- github.com/uudashr/iface v1.4.1 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
- github.com/xen0n/gosmopolitan v1.3.0 // indirect
- github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
- github.com/yagipy/maintidx v1.0.0 // indirect
- github.com/yeya24/promlinter v0.3.0 // indirect
- github.com/ykadowak/zerologlint v0.1.5 // indirect
- github.com/yuin/goldmark v1.7.7 // indirect
- github.com/yuin/goldmark-meta v1.1.0 // indirect
- github.com/zclconf/go-cty v1.18.0 // indirect
- gitlab.com/bosi/decorder v0.4.2 // indirect
- go-simpler.org/musttag v0.14.0 // indirect
- go-simpler.org/sloglint v0.11.1 // indirect
- go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
- go.augendre.info/arangolint v0.4.0 // indirect
- go.augendre.info/fatcontext v0.9.0 // indirect
- go.uber.org/multierr v1.10.0 // indirect
- go.uber.org/zap v1.27.0 // indirect
- go.yaml.in/yaml/v3 v3.0.4 // indirect
- golang.org/x/crypto v0.48.0 // indirect
- golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
- golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
- golang.org/x/mod v0.33.0 // indirect
- golang.org/x/net v0.51.0 // indirect
+ github.com/zclconf/go-cty v1.17.0 // indirect
+ golang.org/x/crypto v0.47.0 // indirect
+ golang.org/x/mod v0.32.0 // indirect
+ golang.org/x/net v0.49.0 // indirect
golang.org/x/sync v0.19.0 // indirect
- golang.org/x/sys v0.41.0 // indirect
- golang.org/x/text v0.34.0 // indirect
- golang.org/x/tools v0.42.0 // indirect
+ golang.org/x/sys v0.40.0 // indirect
+ golang.org/x/text v0.33.0 // indirect
+ golang.org/x/tools v0.41.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 // indirect
- google.golang.org/grpc v1.79.2 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect
+ google.golang.org/grpc v1.78.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
- gopkg.in/ini.v1 v1.67.0 // indirect
- gopkg.in/yaml.v2 v2.4.0 // indirect
- honnef.co/go/tools v0.7.0 // indirect
- mvdan.cc/gofumpt v0.9.2 // indirect
- mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
)
+
+tool golang.org/x/tools/cmd/goimports
diff --git a/go.sum b/go.sum
index f0894d33..cd787442 100644
--- a/go.sum
+++ b/go.sum
@@ -1,373 +1,58 @@
-4d63.com/gocheckcompilerdirectives v1.3.0 h1:Ew5y5CtcAAQeTVKUVFrE7EwHMrTO6BggtEj8BZSjZ3A=
-4d63.com/gocheckcompilerdirectives v1.3.0/go.mod h1:ofsJ4zx2QAuIP/NO/NAh1ig6R1Fb18/GI7RVMwz7kAY=
-4d63.com/gochecknoglobals v0.2.2 h1:H1vdnwnMaZdQW/N+NrkT1SZMTBmcwHe9Vq8lJcYYTtU=
-4d63.com/gochecknoglobals v0.2.2/go.mod h1:lLxwTQjL5eIesRbvnzIP3jZtG140FnTdz+AlMa+ogt0=
-cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
-cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
-cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
-cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
-cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
-cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
-cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
-cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
-cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
-cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
-cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
-cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
-cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
-cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
-cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
-cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
-cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
-cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
-cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
-cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
-cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
-cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
-cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
-cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
-cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
-cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
-cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
-cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
-cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
-cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
-cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
-codeberg.org/chavacava/garif v0.2.0 h1:F0tVjhYbuOCnvNcU3YSpO6b3Waw6Bimy4K0mM8y6MfY=
-codeberg.org/chavacava/garif v0.2.0/go.mod h1:P2BPbVbT4QcvLZrORc2T29szK3xEOlnl0GiPTJmEqBQ=
-codeberg.org/polyfloyd/go-errorlint v1.9.0 h1:VkdEEmA1VBpH6ecQoMR4LdphVI3fA4RrCh2an7YmodI=
-codeberg.org/polyfloyd/go-errorlint v1.9.0/go.mod h1:GPRRu2LzVijNn4YkrZYJfatQIdS+TrcK8rL5Xs24qw8=
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
-dev.gaijin.team/go/exhaustruct/v4 v4.0.0 h1:873r7aNneqoBB3IaFIzhvt2RFYTuHgmMjoKfwODoI1Y=
-dev.gaijin.team/go/exhaustruct/v4 v4.0.0/go.mod h1:aZ/k2o4Y05aMJtiux15x8iXaumE88YdiB0Ai4fXOzPI=
-dev.gaijin.team/go/golib v0.6.0 h1:v6nnznFTs4bppib/NyU1PQxobwDHwCXXl15P7DV5Zgo=
-dev.gaijin.team/go/golib v0.6.0/go.mod h1:uY1mShx8Z/aNHWDyAkZTkX+uCi5PdX7KsG1eDQa2AVE=
-dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/4meepo/tagalign v1.4.3 h1:Bnu7jGWwbfpAie2vyl63Zup5KuRv21olsPIha53BJr8=
-github.com/4meepo/tagalign v1.4.3/go.mod h1:00WwRjiuSbrRJnSVeGWPLp2epS5Q/l4UEy0apLLS37c=
-github.com/Abirdcfly/dupword v0.1.7 h1:2j8sInznrje4I0CMisSL6ipEBkeJUJAmK1/lfoNGWrQ=
-github.com/Abirdcfly/dupword v0.1.7/go.mod h1:K0DkBeOebJ4VyOICFdppB23Q0YMOgVafM0zYW0n9lF4=
-github.com/AdminBenni/iota-mixing v1.0.0 h1:Os6lpjG2dp/AE5fYBPAA1zfa2qMdCAWwPMCgpwKq7wo=
-github.com/AdminBenni/iota-mixing v1.0.0/go.mod h1:i4+tpAaB+qMVIV9OK3m4/DAynOd5bQFaOu+2AhtBCNY=
-github.com/AlwxSin/noinlineerr v1.0.5 h1:RUjt63wk1AYWTXtVXbSqemlbVTb23JOSRiNsshj7TbY=
-github.com/AlwxSin/noinlineerr v1.0.5/go.mod h1:+QgkkoYrMH7RHvcdxdlI7vYYEdgeoFOVjU9sUhw/rQc=
-github.com/Antonboom/errname v1.1.1 h1:bllB7mlIbTVzO9jmSWVWLjxTEbGBVQ1Ff/ClQgtPw9Q=
-github.com/Antonboom/errname v1.1.1/go.mod h1:gjhe24xoxXp0ScLtHzjiXp0Exi1RFLKJb0bVBtWKCWQ=
-github.com/Antonboom/nilnil v1.1.1 h1:9Mdr6BYd8WHCDngQnNVV0b554xyisFioEKi30sksufQ=
-github.com/Antonboom/nilnil v1.1.1/go.mod h1:yCyAmSw3doopbOWhJlVci+HuyNRuHJKIv6V2oYQa8II=
-github.com/Antonboom/testifylint v1.6.4 h1:gs9fUEy+egzxkEbq9P4cpcMB6/G0DYdMeiFS87UiqmQ=
-github.com/Antonboom/testifylint v1.6.4/go.mod h1:YO33FROXX2OoUfwjz8g+gUxQXio5i9qpVy7nXGbxDD4=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
-github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
-github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk=
-github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
-github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/Djarvur/go-err113 v0.1.1 h1:eHfopDqXRwAi+YmCUas75ZE0+hoBHJ2GQNLYRSxao4g=
-github.com/Djarvur/go-err113 v0.1.1/go.mod h1:IaWJdYFLg76t2ihfflPZnM1LIQszWOsFDh2hhhAVF6k=
-github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0=
-github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc=
-github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
-github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
-github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
-github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
-github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
-github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
-github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
-github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
-github.com/MirrexOne/unqueryvet v1.5.4 h1:38QOxShO7JmMWT+eCdDMbcUgGCOeJphVkzzRgyLJgsQ=
-github.com/MirrexOne/unqueryvet v1.5.4/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
-github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
-github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
-github.com/ProtonMail/go-crypto v1.4.0 h1:Zq/pbM3F5DFgJiMouxEdSVY44MVoQNEKp5d5QxIQceQ=
-github.com/ProtonMail/go-crypto v1.4.0/go.mod h1:e1OaTyu5SYVrO9gKOEhTc+5UcXtTUa+P3uLudwcgPqo=
-github.com/SladkyCitron/slogcolor v1.8.0 h1:ln4mUPfVhs7a/vZfjnKkz5YZ71Bg/KFWneS2hfFq6FM=
-github.com/SladkyCitron/slogcolor v1.8.0/go.mod h1:ft8LEVIl4isUkebakhv+ngNXJjWBumnwhXfxTLApf3M=
+github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
+github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
-github.com/alecthomas/chroma/v2 v2.23.1 h1:nv2AVZdTyClGbVQkIzlDm/rnhk1E9bU9nXwmZ/Vk/iY=
-github.com/alecthomas/chroma/v2 v2.23.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
-github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU=
-github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
-github.com/alexkohler/nakedret/v2 v2.0.6 h1:ME3Qef1/KIKr3kWX3nti3hhgNxw6aqN5pZmQiFSsuzQ=
-github.com/alexkohler/nakedret/v2 v2.0.6/go.mod h1:l3RKju/IzOMQHmsEvXwkqMDzHHvurNQfAgE1eVmT40Q=
-github.com/alexkohler/prealloc v1.1.0 h1:cKGRBqlXw5iyQGLYhrXrDlcHxugXpTq4tQ5c91wkf8M=
-github.com/alexkohler/prealloc v1.1.0/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
-github.com/alfatraining/structtag v1.0.0 h1:2qmcUqNcCoyVJ0up879K614L9PazjBSFruTB0GOFjCc=
-github.com/alfatraining/structtag v1.0.0/go.mod h1:p3Xi5SwzTi+Ryj64DqjLWz7XurHxbGsq6y3ubePJPus=
-github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
-github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I=
-github.com/alingse/nilnesserr v0.2.0 h1:raLem5KG7EFVb4UIDAXgrv3N2JIaffeKNtcEXkEWd/w=
-github.com/alingse/nilnesserr v0.2.0/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg=
github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
-github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
-github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
-github.com/ashanbrown/forbidigo/v2 v2.3.0 h1:OZZDOchCgsX5gvToVtEBoV2UWbFfI6RKQTir2UZzSxo=
-github.com/ashanbrown/forbidigo/v2 v2.3.0/go.mod h1:5p6VmsG5/1xx3E785W9fouMxIOkvY2rRV9nMdWadd6c=
-github.com/ashanbrown/makezero/v2 v2.1.0 h1:snuKYMbqosNokUKm+R6/+vOPs8yVAi46La7Ck6QYSaE=
-github.com/ashanbrown/makezero/v2 v2.1.0/go.mod h1:aEGT/9q3S8DHeE57C88z2a6xydvgx8J5hgXIGWgo0MY=
-github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
-github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
-github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
-github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
-github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
-github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
-github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
-github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
-github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w=
-github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo=
-github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M=
-github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k=
-github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
-github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
-github.com/bombsimon/wsl/v4 v4.7.0 h1:1Ilm9JBPRczjyUs6hvOPKvd7VL1Q++PL8M0SXBDf+jQ=
-github.com/bombsimon/wsl/v4 v4.7.0/go.mod h1:uV/+6BkffuzSAVYD+yGyld1AChO7/EuLrCF/8xTiapg=
-github.com/bombsimon/wsl/v5 v5.6.0 h1:4z+/sBqC5vUmSp1O0mS+czxwH9+LKXtCWtHH9rZGQL8=
-github.com/bombsimon/wsl/v5 v5.6.0/go.mod h1:Uqt2EfrMj2NV8UGoN1f1Y3m0NpUVCsUdrNCdet+8LvU=
-github.com/breml/bidichk v0.3.3 h1:WSM67ztRusf1sMoqH6/c4OBCUlRVTKq+CbSeo0R17sE=
-github.com/breml/bidichk v0.3.3/go.mod h1:ISbsut8OnjB367j5NseXEGGgO/th206dVa427kR8YTE=
-github.com/breml/errchkjson v0.4.1 h1:keFSS8D7A2T0haP9kzZTi7o26r7kE3vymjZNeNDRDwg=
-github.com/breml/errchkjson v0.4.1/go.mod h1:a23OvR6Qvcl7DG/Z4o0el6BRAjKnaReoPQFciAl9U3s=
github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw=
github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c=
-github.com/butuzov/ireturn v0.4.0 h1:+s76bF/PfeKEdbG8b54aCocxXmi0wvYdOVsWxVO7n8E=
-github.com/butuzov/ireturn v0.4.0/go.mod h1:ghI0FrCmap8pDWZwfPisFD1vEc56VKH4NpQUxDHta70=
-github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc=
-github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI=
-github.com/catenacyber/perfsprint v0.10.1 h1:u7Riei30bk46XsG8nknMhKLXG9BcXz3+3tl/WpKm0PQ=
-github.com/catenacyber/perfsprint v0.10.1/go.mod h1:DJTGsi/Zufpuus6XPGJyKOTMELe347o6akPvWG9Zcsc=
-github.com/ccojocar/zxcvbn-go v1.0.4 h1:FWnCIRMXPj43ukfX000kvBZvV6raSxakYr1nzyNrUcc=
-github.com/ccojocar/zxcvbn-go v1.0.4/go.mod h1:3GxGX+rHmueTUMvm5ium7irpyjmm7ikxYFOSJB21Das=
-github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
-github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/charithe/durationcheck v0.0.11 h1:g1/EX1eIiKS57NTWsYtHDZ/APfeXKhye1DidBcABctk=
-github.com/charithe/durationcheck v0.0.11/go.mod h1:x5iZaixRNl8ctbM+3B2RrPG5t856TxRyVQEnbIEM2X4=
-github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
-github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
-github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
-github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
-github.com/charmbracelet/x/ansi v0.10.1 h1:rL3Koar5XvX0pHGfovN03f5cxLbCF2YvLeyz7D2jVDQ=
-github.com/charmbracelet/x/ansi v0.10.1/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
-github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8=
-github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
-github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
-github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
-github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
-github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
-github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
-github.com/ckaznocha/intrange v0.3.1 h1:j1onQyXvHUsPWujDH6WIjhyH26gkRt/txNlV7LspvJs=
-github.com/ckaznocha/intrange v0.3.1/go.mod h1:QVepyz1AkUoFQkpEqksSYpNpUo3c5W7nWh/s6SHIJJk=
-github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
-github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8=
-github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4=
-github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
-github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cloudflare/circl v1.6.2 h1:hL7VBpHHKzrV5WTfHCaBsgx/HGbBYlgrwvNXEVDYYsQ=
+github.com/cloudflare/circl v1.6.2/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
-github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs=
-github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
-github.com/daixiang0/gci v0.13.7 h1:+0bG5eK9vlI08J+J/NWGbWPTNiXPG4WhNLJOkSxWITQ=
-github.com/daixiang0/gci v0.13.7/go.mod h1:812WVN6JLFY9S6Tv76twqmNqevN0pa3SX3nih0brVzQ=
-github.com/dave/dst v0.27.3 h1:P1HPoMza3cMEquVf9kKy8yXsFirry4zEnWOdYPOoIzY=
-github.com/dave/dst v0.27.3/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8=
-github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY=
-github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
-github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
-github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
-github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
-github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
-github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
-github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q=
-github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
-github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
-github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
-github.com/firefart/nonamedreturns v1.0.6 h1:vmiBcKV/3EqKY3ZiPxCINmpS431OcE1S47AQUwhrg8E=
-github.com/firefart/nonamedreturns v1.0.6/go.mod h1:R8NisJnSIpvPWheCq0mNRXJok6D8h7fagJTF8EMEwCo=
-github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI=
-github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
-github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo=
-github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA=
-github.com/ghostiam/protogetter v0.3.20 h1:oW7OPFit2FxZOpmMRPP9FffU4uUpfeE/rEdE1f+MzD0=
-github.com/ghostiam/protogetter v0.3.20/go.mod h1:FjIu5Yfs6FT391m+Fjp3fbAYJ6rkL/J6ySpZBfnODuI=
-github.com/go-critic/go-critic v0.14.3 h1:5R1qH2iFeo4I/RJU8vTezdqs08Egi4u5p6vOESA0pog=
-github.com/go-critic/go-critic v0.14.3/go.mod h1:xwntfW6SYAd7h1OqDzmN6hBX/JxsEKl5up/Y2bsxgVQ=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
-github.com/go-git/go-git/v5 v5.16.5 h1:mdkuqblwr57kVfXri5TTH+nMFLNUxIj9Z7F5ykFbw5s=
-github.com/go-git/go-git/v5 v5.16.5/go.mod h1:QOMLpNf1qxuSY4StA/ArOdfFR2TrKEjJiye2kel2m+M=
-github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
-github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
-github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
-github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
-github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
-github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
-github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
-github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
+github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60=
+github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
-github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
-github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8=
-github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU=
-github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s=
-github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw=
-github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4=
-github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ=
-github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw=
-github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY=
-github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco=
-github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4=
-github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA=
-github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA=
-github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
-github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw=
-github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ=
-github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus=
-github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig=
-github.com/go-viper/mapstructure/v2 v2.5.0 h1:vM5IJoUAy3d7zRSVtIwQgBj7BiWtMPfmPEgAXnvj1Ro=
-github.com/go-viper/mapstructure/v2 v2.5.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
-github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY=
-github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
-github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
-github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
-github.com/godoc-lint/godoc-lint v0.11.2 h1:Bp0FkJWoSdNsBikdNgIcgtaoo+xz6I/Y9s5WSBQUeeM=
-github.com/godoc-lint/godoc-lint v0.11.2/go.mod h1:iVpGdL1JCikNH2gGeAn3Hh+AgN5Gx/I/cxV+91L41jo=
-github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw=
-github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0=
-github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
-github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
-github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
-github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
+github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
-github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
-github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
-github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
-github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
-github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
-github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
-github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
-github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
-github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
-github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
-github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
-github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
-github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
-github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
-github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
-github.com/golangci/asciicheck v0.5.0 h1:jczN/BorERZwK8oiFBOGvlGPknhvq0bjnysTj4nUfo0=
-github.com/golangci/asciicheck v0.5.0/go.mod h1:5RMNAInbNFw2krqN6ibBxN/zfRFa9S6tA1nPdM0l8qQ=
-github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 h1:WUvBfQL6EW/40l6OmeSBYQJNSif4O11+bmWEz+C7FYw=
-github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32/go.mod h1:NUw9Zr2Sy7+HxzdjIULge71wI6yEg1lWQr7Evcu8K0E=
-github.com/golangci/go-printf-func-name v0.1.1 h1:hIYTFJqAGp1iwoIfsNTpoq1xZAarogrvjO9AfiW3B4U=
-github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss=
-github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
-github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
-github.com/golangci/golangci-lint/v2 v2.11.2 h1:4Icd3mEqthcFcFww8L67OBtfKB/obXxko8aFUMqP/5w=
-github.com/golangci/golangci-lint/v2 v2.11.2/go.mod h1:wexdFBIQNhHNhDe1oqzlGFE5dYUqlfccWJKWjoWF1GI=
-github.com/golangci/golines v0.15.0 h1:Qnph25g8Y1c5fdo1X7GaRDGgnMHgnxh4Gk4VfPTtRx0=
-github.com/golangci/golines v0.15.0/go.mod h1:AZjXd23tbHMpowhtnGlj9KCNsysj72aeZVVHnVcZx10=
-github.com/golangci/misspell v0.8.0 h1:qvxQhiE2/5z+BVRo1kwYA8yGz+lOlu5Jfvtx2b04Jbg=
-github.com/golangci/misspell v0.8.0/go.mod h1:WZyyI2P3hxPY2UVHs3cS8YcllAeyfquQcKfdeE9AFVg=
-github.com/golangci/plugin-module-register v0.1.2 h1:e5WM6PO6NIAEcij3B053CohVp3HIYbzSuP53UAYgOpg=
-github.com/golangci/plugin-module-register v0.1.2/go.mod h1:1+QGTsKBvAIvPvoY/os+G5eoqxWn70HYDm2uvUyGuVw=
-github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s=
-github.com/golangci/revgrep v0.8.0/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k=
-github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e h1:ai0EfmVYE2bRA5htgAG9r7s3tHsfjIhN98WshBTJ9jM=
-github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e/go.mod h1:Vrn4B5oR9qRwM+f54koyeH3yzphlecwERs0el27Fr/s=
-github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e h1:gD6P7NEo7Eqtt0ssnqSJNNndxe69DOQ24A5h7+i3KpM=
-github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e/go.mod h1:h+wZwLjUTJnm/P2rwlbJdRPZXOzaT36/FwnPnY2inzc=
-github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
-github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
-github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
-github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
-github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
-github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
-github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
-github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
-github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
-github.com/gordonklaus/ineffassign v0.2.0 h1:Uths4KnmwxNJNzq87fwQQDDnbNb7De00VOk9Nu0TySs=
-github.com/gordonklaus/ineffassign v0.2.0/go.mod h1:TIpymnagPSexySzs7F9FnO1XFTy8IT3a59vmZp5Y9Lw=
-github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk=
-github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc=
-github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM=
-github.com/gostaticanalysis/comment v1.5.0 h1:X82FLl+TswsUMpMh17srGRuKaaXprTaytmEpgnKIDu8=
-github.com/gostaticanalysis/comment v1.5.0/go.mod h1:V6eb3gpCv9GNVqb6amXzEUX3jXLVK/AdA+IrAMSqvEc=
-github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk=
-github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY=
-github.com/gostaticanalysis/nilerr v0.1.2 h1:S6nk8a9N8g062nsx63kUkF6AzbHGw7zzyHMcpu52xQU=
-github.com/gostaticanalysis/nilerr v0.1.2/go.mod h1:A19UHhoY3y8ahoL7YKz6sdjDtduwTSI4CsymaC2htPA=
-github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M=
-github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU=
-github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -380,9 +65,6 @@ github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g
github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM=
github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
-github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo=
-github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw=
-github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-plugin v1.7.0 h1:YghfQH/0QmPNc/AZMTFE3ac8fipZyZECHdDPshfk+mA=
@@ -392,97 +74,46 @@ github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
-github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4=
github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
-github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
-github.com/hashicorp/hc-install v0.9.3 h1:1H4dgmgzxEVwT6E/d/vIL5ORGVKz9twRwDw+qA5Hyho=
-github.com/hashicorp/hc-install v0.9.3/go.mod h1:FQlQ5I3I/X409N/J1U4pPeQQz1R3BoV0IysB7aiaQE0=
-github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
-github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hashicorp/hc-install v0.9.2 h1:v80EtNX4fCVHqzL9Lg/2xkp62bbvQMnvPQ0G+OmtO24=
+github.com/hashicorp/hc-install v0.9.2/go.mod h1:XUqBQNnuT4RsxoxiM9ZaUk0NX8hi2h+Lb6/c0OZnC/I=
github.com/hashicorp/hcl/v2 v2.24.0 h1:2QJdZ454DSsYGoaE6QheQZjtKZSUs9Nh2izTWiwQxvE=
github.com/hashicorp/hcl/v2 v2.24.0/go.mod h1:oGoO1FIQYfn/AgyOhlg9qLC6/nOJPX3qGbkZpYAcqfM=
github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
-github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyzRMNI35rNaY=
-github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8=
+github.com/hashicorp/terraform-exec v0.24.0 h1:mL0xlk9H5g2bn0pPF6JQZk5YlByqSqrO5VoaNtAf8OE=
+github.com/hashicorp/terraform-exec v0.24.0/go.mod h1:lluc/rDYfAhYdslLJQg3J0oDqo88oGQAdHR+wDqFvo4=
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
-github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU=
-github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o=
-github.com/hashicorp/terraform-plugin-framework v1.18.0 h1:Xy6OfqSTZfAAKXSlJ810lYvuQvYkOpSUoNMQ9l2L1RA=
-github.com/hashicorp/terraform-plugin-framework v1.18.0/go.mod h1:eeFIf68PME+kenJeqSrIcpHhYQK0TOyv7ocKdN4Z35E=
+github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY=
+github.com/hashicorp/terraform-plugin-framework v1.17.0/go.mod h1:4OUXKdHNosX+ys6rLgVlgklfxN3WHR5VHSOABeS/BM0=
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0/go.mod h1:GBKTNGbGVJohU03dZ7U8wHqc2zYnMUawgCN+gC0itLc=
-github.com/hashicorp/terraform-plugin-go v0.30.0 h1:VmEiD0n/ewxbvV5VI/bYwNtlSEAXtHaZlSnyUUuQK6k=
-github.com/hashicorp/terraform-plugin-go v0.30.0/go.mod h1:8d523ORAW8OHgA9e8JKg0ezL3XUO84H0A25o4NY/jRo=
+github.com/hashicorp/terraform-plugin-go v0.29.0 h1:1nXKl/nSpaYIUBU1IG/EsDOX0vv+9JxAltQyDMpq5mU=
+github.com/hashicorp/terraform-plugin-go v0.29.0/go.mod h1:vYZbIyvxyy0FWSmDHChCqKvI40cFTDGSb3D8D70i9GM=
github.com/hashicorp/terraform-plugin-log v0.10.0 h1:eu2kW6/QBVdN4P3Ju2WiB2W3ObjkAsyfBsL3Wh1fj3g=
github.com/hashicorp/terraform-plugin-log v0.10.0/go.mod h1:/9RR5Cv2aAbrqcTSdNmY1NRHP4E3ekrXRGjqORpXyB0=
-github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 h1:ltFG/dSs4mMHNpBqHptCtJqYM4FekUDJbUcWj+6HGlg=
-github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0/go.mod h1:xJk7ap8vRI/B2U6TrVs7bu/gTihyor8XBTLSs5Y6z2w=
+github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 h1:mlAq/OrMlg04IuJT7NpefI1wwtdpWudnEmjuQs04t/4=
+github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1/go.mod h1:GQhpKVvvuwzD79e8/NZ+xzj+ZpWovdPAe8nfV/skwNU=
github.com/hashicorp/terraform-plugin-testing v1.14.0 h1:5t4VKrjOJ0rg0sVuSJ86dz5K7PHsMO6OKrHFzDBerWA=
github.com/hashicorp/terraform-plugin-testing v1.14.0/go.mod h1:1qfWkecyYe1Do2EEOK/5/WnTyvC8wQucUkkhiGLg5nk=
github.com/hashicorp/terraform-registry-address v0.4.0 h1:S1yCGomj30Sao4l5BMPjTGZmCNzuv7/GDTDX99E9gTk=
github.com/hashicorp/terraform-registry-address v0.4.0/go.mod h1:LRS1Ay0+mAiRkUyltGT+UHWkIqTFvigGn/LbMshfflE=
-github.com/hashicorp/terraform-svchost v0.2.1 h1:ubvrTFw3Q7CsoEaX7V06PtCTKG3wu7GyyobAoN4eF3Q=
-github.com/hashicorp/terraform-svchost v0.2.1/go.mod h1:zDMheBLvNzu7Q6o9TBvPqiZToJcSuCLXjAXxBslSky4=
+github.com/hashicorp/terraform-svchost v0.2.0 h1:wVc2vMiodOHvNZcQw/3y9af1XSomgjGSv+rv3BMCk7I=
+github.com/hashicorp/terraform-svchost v0.2.0/go.mod h1:/98rrS2yZsbppi4VGVCjwYmh8dqsKzISqK7Hli+0rcQ=
github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8=
github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns=
-github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
-github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
-github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
-github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
-github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
-github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
-github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
-github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
-github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
-github.com/ivanpirog/coloredcobra v1.0.1 h1:aURSdEmlR90/tSiWS0dMjdwOvCVUeYLfltLfbgNxrN4=
-github.com/ivanpirog/coloredcobra v1.0.1/go.mod h1:iho4nEKcnwZFiniGSdcgdvRgZNjxm+h20acv8vqmN6Q=
-github.com/jarcoal/httpmock v1.4.1 h1:0Ju+VCFuARfFlhVXFc2HxlcQkfB+Xq12/EotHko+x2A=
-github.com/jarcoal/httpmock v1.4.1/go.mod h1:ftW1xULwo+j0R0JJkJIIi7UKigZUXCLLanykgjwBXL0=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
-github.com/jgautheron/goconst v1.8.2 h1:y0XF7X8CikZ93fSNT6WBTb/NElBu9IjaY7CCYQrCMX4=
-github.com/jgautheron/goconst v1.8.2/go.mod h1:A0oxgBCHy55NQn6sYpO7UdnA9p+h7cPtoOZUmvNIako=
github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94=
github.com/jhump/protoreflect v1.17.0/go.mod h1:h9+vUUL38jiBzck8ck+6G/aeMX8Z4QUY/NiJPwPNi+8=
-github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs=
-github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c=
-github.com/jjti/go-spancheck v0.6.5 h1:lmi7pKxa37oKYIMScialXUK6hP3iY5F1gu+mLBPgYB8=
-github.com/jjti/go-spancheck v0.6.5/go.mod h1:aEogkeatBrbYsyW6y5TgDfihCulDYciL1B7rG2vSsrU=
-github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
-github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
-github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
-github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
-github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
-github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
-github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
-github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
-github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
-github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
-github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ=
-github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY=
-github.com/karamaru-alpha/copyloopvar v1.2.2 h1:yfNQvP9YaGQR7VaWLYcfZUlRP2eo2vhExWKxD/fP6q0=
-github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRkmUvH0x35ykgaKgsgJY=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
-github.com/kisielk/errcheck v1.10.0 h1:Lvs/YAHP24YKg08LA8oDw2z9fJVme090RAXd90S+rrw=
-github.com/kisielk/errcheck v1.10.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
-github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
-github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -490,45 +121,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/kulti/thelper v0.7.1 h1:fI8QITAoFVLx+y+vSyuLBP+rcVIB8jKooNSCT2EiI98=
-github.com/kulti/thelper v0.7.1/go.mod h1:NsMjfQEy6sd+9Kfw8kCP61W1I0nerGSYSFnGaxQkcbs=
-github.com/kunwardeep/paralleltest v1.0.15 h1:ZMk4Qt306tHIgKISHWFJAO1IDQJLc6uDyJMLyncOb6w=
-github.com/kunwardeep/paralleltest v1.0.15/go.mod h1:di4moFqtfz3ToSKxhNjhOZL+696QtJGCFe132CbBLGk=
-github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4=
-github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI=
-github.com/ldez/exptostd v0.4.5 h1:kv2ZGUVI6VwRfp/+bcQ6Nbx0ghFWcGIKInkG/oFn1aQ=
-github.com/ldez/exptostd v0.4.5/go.mod h1:QRjHRMXJrCTIm9WxVNH6VW7oN7KrGSht69bIRwvdFsM=
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 h1:QJRB9Gs5i/h6TVJI6yl09Qm6rNooznRiKwIw+VIxd90=
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1/go.mod h1:0eUeas7XtKDPKQbB0KijfaMPbuQ/cIprtoTRiwaUoFg=
-github.com/ldez/gomoddirectives v0.8.0 h1:JqIuTtgvFC2RdH1s357vrE23WJF2cpDCPFgA/TWDGpk=
-github.com/ldez/gomoddirectives v0.8.0/go.mod h1:jutzamvZR4XYJLr0d5Honycp4Gy6GEg2mS9+2YX3F1Q=
-github.com/ldez/grignotin v0.10.1 h1:keYi9rYsgbvqAZGI1liek5c+jv9UUjbvdj3Tbn5fn4o=
-github.com/ldez/grignotin v0.10.1/go.mod h1:UlDbXFCARrXbWGNGP3S5vsysNXAPhnSuBufpTEbwOas=
-github.com/ldez/structtags v0.6.1 h1:bUooFLbXx41tW8SvkfwfFkkjPYvFFs59AAMgVg6DUBk=
-github.com/ldez/structtags v0.6.1/go.mod h1:YDxVSgDy/MON6ariaxLF2X09bh19qL7MtGBN5MrvbdY=
-github.com/ldez/tagliatelle v0.7.2 h1:KuOlL70/fu9paxuxbeqlicJnCspCRjH0x8FW+NfgYUk=
-github.com/ldez/tagliatelle v0.7.2/go.mod h1:PtGgm163ZplJfZMZ2sf5nhUT170rSuPgBimoyYtdaSI=
-github.com/ldez/usetesting v0.5.0 h1:3/QtzZObBKLy1F4F8jLuKJiKBjjVFi1IavpoWbmqLwc=
-github.com/ldez/usetesting v0.5.0/go.mod h1:Spnb4Qppf8JTuRgblLrEWb7IE6rDmUpGvxY3iRrzvDQ=
-github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY=
-github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA=
-github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
-github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
-github.com/macabu/inamedparam v0.2.0 h1:VyPYpOc10nkhI2qeNUdh3Zket4fcZjEWe35poddBCpE=
-github.com/macabu/inamedparam v0.2.0/go.mod h1:+Pee9/YfGe5LJ62pYXqB89lJ+0k5bsR8Wgz/C0Zlq3U=
-github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo=
-github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
-github.com/manuelarte/embeddedstructfieldcheck v0.4.0 h1:3mAIyaGRtjK6EO9E73JlXLtiy7ha80b2ZVGyacxgfww=
-github.com/manuelarte/embeddedstructfieldcheck v0.4.0/go.mod h1:z8dFSyXqp+fC6NLDSljRJeNQJJDWnY7RoWFzV3PC6UM=
-github.com/manuelarte/funcorder v0.5.0 h1:llMuHXXbg7tD0i/LNw8vGnkDTHFpTnWqKPI85Rknc+8=
-github.com/manuelarte/funcorder v0.5.0/go.mod h1:Yt3CiUQthSBMBxjShjdXMexmzpP8YGvGLjrxJNkO2hA=
-github.com/maratori/testableexamples v1.0.1 h1:HfOQXs+XgfeRBJ+Wz0XfH+FHnoY9TVqL6Fcevpzy4q8=
-github.com/maratori/testableexamples v1.0.1/go.mod h1:XE2F/nQs7B9N08JgyRmdGjYVGqxWwClLPCGSQhXQSrQ=
-github.com/maratori/testpackage v1.1.2 h1:ffDSh+AgqluCLMXhM19f/cpvQAKygKAJXFl9aUjmbqs=
-github.com/maratori/testpackage v1.1.2/go.mod h1:8F24GdVDFW5Ew43Et02jamrVMNXLUNaOynhDssITGfc=
-github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4=
-github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs=
-github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
@@ -537,206 +131,47 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
-github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
-github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
-github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
-github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/maxatome/go-testdeep v1.14.0 h1:rRlLv1+kI8eOI3OaBXZwb3O7xY3exRzdW5QyX48g9wI=
-github.com/maxatome/go-testdeep v1.14.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
-github.com/mgechev/revive v1.15.0 h1:vJ0HzSBzfNyPbHKolgiFjHxLek9KUijhqh42yGoqZ8Q=
-github.com/mgechev/revive v1.15.0/go.mod h1:LlAKO3QQe9OJ0pVZzI2GPa8CbXGZ/9lNpCGvK4T/a8A=
-github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
-github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
-github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU=
github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8=
github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0=
github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
-github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
-github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
-github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
-github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
-github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
-github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
-github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI=
-github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U=
-github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
-github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
-github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U=
-github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE=
-github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg=
-github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs=
-github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk=
-github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
-github.com/nunnatsa/ginkgolinter v0.23.0 h1:x3o4DGYOWbBMP/VdNQKgSj+25aJKx2Pe6lHr8gBcgf8=
-github.com/nunnatsa/ginkgolinter v0.23.0/go.mod h1:9qN1+0akwXEccwV1CAcCDfcoBlWXHB+ML9884pL4SZ4=
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
-github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
-github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
-github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
-github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
-github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
-github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
-github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
-github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
-github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
-github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo=
-github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
-github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
-github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
-github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
-github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
-github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk=
-github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY=
-github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
-github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
-github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
-github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
-github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
-github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4=
-github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
-github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
-github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
-github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
-github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
-github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU=
-github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
-github.com/quasilyte/go-ruleguard v0.4.5 h1:AGY0tiOT5hJX9BTdx/xBdoCubQUAE2grkqY2lSwvZcA=
-github.com/quasilyte/go-ruleguard v0.4.5/go.mod h1:Vl05zJ538vcEEwu16V/Hdu7IYZWyKSwIy4c88Ro1kRE=
-github.com/quasilyte/go-ruleguard/dsl v0.3.23 h1:lxjt5B6ZCiBeeNO8/oQsegE6fLeCzuMRoVWSkXC4uvY=
-github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
-github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo=
-github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng=
-github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU=
-github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0=
-github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs=
-github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ=
-github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI=
-github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU=
-github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
-github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
-github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
-github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/ryancurrah/gomodguard v1.4.1 h1:eWC8eUMNZ/wM/PWuZBv7JxxqT5fiIKSIyTvjb7Elr+g=
-github.com/ryancurrah/gomodguard v1.4.1/go.mod h1:qnMJwV1hX9m+YJseXEBhd2s90+1Xn6x9dLz11ualI1I=
-github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU=
-github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ=
-github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0=
-github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4=
-github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ=
-github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
-github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw=
-github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
-github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ=
-github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8=
-github.com/securego/gosec/v2 v2.24.7 h1:3k5yJnrhT1TTdsG0ZsnenlfCcT+7Y/+zeCPHbL7QAn8=
-github.com/securego/gosec/v2 v2.24.7/go.mod h1:AdDJbjcG/XxFgVv7pW19vMNYlFM6+Q6Qy3t6lWAUcEY=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
-github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
-github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
-github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
-github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
-github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
-github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
-github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
-github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w=
-github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
-github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE=
-github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
-github.com/sonatard/noctx v0.5.0 h1:e/jdaqAsuWVOKQ0P6NWiIdDNHmHT5SwuuSfojFjzwrw=
-github.com/sonatard/noctx v0.5.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
-github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
-github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
-github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
-github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
-github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
-github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
-github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
-github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
-github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
-github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ=
-github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI=
-github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0=
-github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I=
-github.com/stackitcloud/stackit-sdk-go/core v0.22.0 h1:6rViz7GnNwXSh51Lur5xuDzO8EWSZfN9J0HvEkBKq6c=
-github.com/stackitcloud/stackit-sdk-go/core v0.22.0/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
-github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0 h1:4wfRYOEFSpNLPvOV0YNIoGLVQBIQNkCvZwmL7JFzphM=
-github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0/go.mod h1:tIYiqgnS9929dEhQjf6rx1yNsdFf59e4r2wcXQMkLYo=
-github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0 h1:JeSnhioDCfV5K4V4mOjKtKgkgNtrkrU9bkt7JBs57lA=
-github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0/go.mod h1:3NQNKhHYIjIHTmf6RAcYLdnq17a8AZKkqFCu9Q/Y/3Y=
-github.com/stbenjam/no-sprintf-host-port v0.3.1 h1:AyX7+dxI4IdLBPtDbsGAyqiTSLpCP9hWRrXQDU4Cm/g=
-github.com/stbenjam/no-sprintf-host-port v0.3.1/go.mod h1:ODbZesTCHMVKthBHskvUUexdcNHAQRXk9NpSsL8p/HQ=
+github.com/stackitcloud/stackit-sdk-go/core v0.21.0 h1:QXZqiaO7U/4IpTkJfzt4dt6QxJzG2uUS12mBnHpYNik=
+github.com/stackitcloud/stackit-sdk-go/core v0.21.0/go.mod h1:fqto7M82ynGhEnpZU6VkQKYWYoFG5goC076JWXTUPRQ=
+github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts=
+github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs=
+github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1 h1:6MJdy1xmdE+uOo/F8mR5HSldjPSHpdhwuqS3u9m2EWQ=
+github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1/go.mod h1:XLr3ZfrT1g8ZZMm7A6RXOPBuhBkikdUN2o/+/Y+Hu+g=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
-github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
-github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs=
-github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
github.com/teambition/rrule-go v1.8.2 h1:lIjpjvWTj9fFUZCmuoVDrKVOtdiyzbzc93qTmRVe/J8=
github.com/teambition/rrule-go v1.8.2/go.mod h1:Ieq5AbrKGciP1V//Wq8ktsTXwSwJHDD5mD/wLBGl3p4=
-github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0=
-github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY=
-github.com/tetafro/godot v1.5.4 h1:u1ww+gqpRLiIA16yF2PV1CV1n/X3zhyezbNXC3E14Sg=
-github.com/tetafro/godot v1.5.4/go.mod h1:eOkMrVQurDui411nBY2FA05EYH01r14LuWY/NrVDVcU=
-github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 h1:9LPGD+jzxMlnk5r6+hJnar67cgpDIz/iyD+rfl5r2Vk=
-github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460=
-github.com/timonwong/loggercheck v0.11.0 h1:jdaMpYBl+Uq9mWPXv1r8jc5fC3gyXx4/WGwTnnNKn4M=
-github.com/timonwong/loggercheck v0.11.0/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8=
-github.com/tomarrell/wrapcheck/v2 v2.12.0 h1:H/qQ1aNWz/eeIhxKAFvkfIA+N7YDvq6TWVFL27Of9is=
-github.com/tomarrell/wrapcheck/v2 v2.12.0/go.mod h1:AQhQuZd0p7b6rfW+vUwHm5OMCGgp63moQ9Qr/0BpIWo=
-github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw=
-github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw=
-github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI=
-github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
-github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
-github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
-github.com/uudashr/gocognit v1.2.1 h1:CSJynt5txTnORn/DkhiB4mZjwPuifyASC8/6Q0I/QS4=
-github.com/uudashr/gocognit v1.2.1/go.mod h1:acaubQc6xYlXFEMb9nWX2dYBzJ/bIjEkc1zzvyIZg5Q=
-github.com/uudashr/iface v1.4.1 h1:J16Xl1wyNX9ofhpHmQ9h9gk5rnv2A6lX/2+APLTo0zU=
-github.com/uudashr/iface v1.4.1/go.mod h1:pbeBPlbuU2qkNDn0mmfrxP2X+wjPMIQAy+r1MBXSXtg=
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI=
github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
@@ -746,446 +181,92 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
-github.com/xen0n/gosmopolitan v1.3.0 h1:zAZI1zefvo7gcpbCOrPSHJZJYA9ZgLfJqtKzZ5pHqQM=
-github.com/xen0n/gosmopolitan v1.3.0/go.mod h1:rckfr5T6o4lBtM1ga7mLGKZmLxswUoH1zxHgNXOsEt4=
-github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
-github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
-github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM=
-github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk=
-github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs=
-github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4=
-github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw=
-github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg=
-github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
-github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
-github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
-github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
-github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
-github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU=
-github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
-github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
-github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0=
-github.com/zclconf/go-cty v1.18.0 h1:pJ8+HNI4gFoyRNqVE37wWbJWVw43BZczFo7KUoRczaA=
-github.com/zclconf/go-cty v1.18.0/go.mod h1:qpnV6EDNgC1sns/AleL1fvatHw72j+S+nS+MJ+T2CSg=
+github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0=
+github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
-gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo=
-gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8=
-go-simpler.org/musttag v0.14.0 h1:XGySZATqQYSEV3/YTy+iX+aofbZZllJaqwFWs+RTtSo=
-go-simpler.org/musttag v0.14.0/go.mod h1:uP8EymctQjJ4Z1kUnjX0u2l60WfUdQxCwSNKzE1JEOE=
-go-simpler.org/sloglint v0.11.1 h1:xRbPepLT/MHPTCA6TS/wNfZrDzkGvCCqUv4Bdwc3H7s=
-go-simpler.org/sloglint v0.11.1/go.mod h1:2PowwiCOK8mjiF+0KGifVOT8ZsCNiFzvfyJeJOIt8MQ=
-go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw=
-go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU=
-go.augendre.info/arangolint v0.4.0 h1:xSCZjRoS93nXazBSg5d0OGCi9APPLNMmmLrC995tR50=
-go.augendre.info/arangolint v0.4.0/go.mod h1:l+f/b4plABuFISuKnTGD4RioXiCCgghv2xqst/xOvAA=
-go.augendre.info/fatcontext v0.9.0 h1:Gt5jGD4Zcj8CDMVzjOJITlSb9cEch54hjRRlN3qDojE=
-go.augendre.info/fatcontext v0.9.0/go.mod h1:L94brOAT1OOUNue6ph/2HnwxoNlds9aXDF2FcUntbNw=
-go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
-go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
-go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
-go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
-go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
-go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
-go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
-go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
-go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
-go.opentelemetry.io/otel/sdk v1.39.0 h1:nMLYcjVsvdui1B/4FRkwjzoRVsMK8uL/cj0OyhKzt18=
-go.opentelemetry.io/otel/sdk v1.39.0/go.mod h1:vDojkC4/jsTJsE+kh+LXYQlbL8CgrEcwmt1ENZszdJE=
-go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2WKg+sEJTtB8=
-go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew=
-go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
-go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
-go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
-go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
-go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
-go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
-go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
+go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
+go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
+go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
+go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
+go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E=
+go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg=
+go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM=
+go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
+go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
+go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
-golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
-golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
-golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
-golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
-golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
-golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
-golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
-golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
-golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
-golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
-golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME=
-golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
-golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
-golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
-golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
-golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
-golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 h1:qWFG1Dj7TBjOjOvhEOkmyGPVoquqUKnIU0lEVLp8xyk=
-golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms=
-golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
-golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
-golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
-golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
-golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
-golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
-golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
-golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
-golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
-golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
-golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
-golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
-golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
-golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
+golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
+golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
-golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
-golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
-golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
+golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
-golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
-golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
-golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
-golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
-golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
-golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
-golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
-golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
-golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
-golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
+golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
-golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
-golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
-golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
+golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 h1:i0p03B68+xC1kD2QUO8JzDTPXCzhN56OLJ+IhHY8U3A=
+golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
-golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
-golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
-golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
-golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
-golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
-golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
+golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
-golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
-golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
-golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
-golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
-golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
+golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
-golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
-golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
-golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
-golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
-golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
-golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
-golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
-golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
-golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
-golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
-golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
-golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
-golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
+golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
+golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
-google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
-google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
-google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
-google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
-google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
-google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
-google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
-google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
-google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
-google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
-google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
-google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
-google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
-google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
-google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
-google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
-google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
-google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
-google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
-google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
-google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 h1:ggcbiqK8WWh6l1dnltU4BgWGIGo+EVYxCaAPih/zQXQ=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
-google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
-google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
-google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
-google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
-google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
-google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
-google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.79.2 h1:fRMD94s2tITpyJGtBBn7MkMseNpOZU8ZxgC3MMBaXRU=
-google.golang.org/grpc v1.79.2/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
-google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
-google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
-google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
-google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
-google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
-google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
-google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 h1:sNrWoksmOyF5bvJUcnmbeAmQi8baNhqg5IWaI3llQqU=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
+google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc=
+google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
-gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
-gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
-gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
-gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
-honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-honnef.co/go/tools v0.7.0 h1:w6WUp1VbkqPEgLz4rkBzH/CSU6HkoqNLp6GstyTx3lU=
-honnef.co/go/tools v0.7.0/go.mod h1:pm29oPxeP3P82ISxZDgIYeOaf9ta6Pi0EWvCFoLG2vc=
-mvdan.cc/gofumpt v0.9.2 h1:zsEMWL8SVKGHNztrx6uZrXdp7AX8r421Vvp23sz7ik4=
-mvdan.cc/gofumpt v0.9.2/go.mod h1:iB7Hn+ai8lPvofHd9ZFGVg2GOr8sBUw1QUWjNbmIL/s=
-mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 h1:ssMzja7PDPJV8FStj7hq9IKiuiKhgz9ErWw+m68e7DI=
-mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15/go.mod h1:4M5MMXl2kW6fivUT6yRGpLLPNfuGtU2Z0cPvFquGDYU=
-rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
-rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
-rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
diff --git a/.golang-ci.yaml b/golang-ci.yaml
similarity index 63%
rename from .golang-ci.yaml
rename to golang-ci.yaml
index a9fa6be5..b3f00eb7 100644
--- a/.golang-ci.yaml
+++ b/golang-ci.yaml
@@ -1,13 +1,7 @@
+
version: "2"
run:
concurrency: 4
-output:
- formats:
- text:
- print-linter-name: true
- print-issued-lines: true
- colors: true
- path: stdout
linters:
enable:
- bodyclose
@@ -30,11 +24,6 @@ linters:
rules:
main:
list-mode: lax
- allow:
- - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
- - github.com/hashicorp/terraform-plugin-framework
- - github.com/hashicorp/terraform-plugin-log
- - github.com/stackitcloud/stackit-sdk-go
deny:
- pkg: github.com/stretchr/testify
desc: Do not use a testing framework
@@ -74,21 +63,24 @@ linters:
- name: empty-lines
- name: early-return
exclusions:
- paths:
- - generator/
generated: lax
- warn-unused: true
- # Excluding configuration per-path, per-linter, per-text and per-source.
- rules:
- # Exclude some linters from running on tests files.
- - path: _test\.go
- linters:
- - gochecknoinits
+ paths:
+ - third_party$
+ - builtin$
+ - examples$
+ - tools/copy.go
+ - tools/main.go
formatters:
enable:
- #- gofmt
+ - gofmt
- goimports
settings:
goimports:
local-prefixes:
- - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
\ No newline at end of file
+ - github.com/freiheit-com/nmww
+ exclusions:
+ generated: lax
+ paths:
+ - third_party$
+ - builtin$
+ - examples$
diff --git a/golang-ci.yaml.bak b/golang-ci.yaml.bak
deleted file mode 100644
index 11f74066..00000000
--- a/golang-ci.yaml.bak
+++ /dev/null
@@ -1,97 +0,0 @@
-
-version: "2"
-run:
- concurrency: 4
-output:
- formats:
- text:
- print-linter-name: true
- print-issued-lines: true
- colors: true
- path: stdout
-linters:
- enable:
- - bodyclose
- - depguard
- - errorlint
- - forcetypeassert
- - gochecknoinits
- - gocritic
- - gosec
- - misspell
- - nakedret
- - revive
- - sqlclosecheck
- - wastedassign
- disable:
- - noctx
- - unparam
- settings:
- depguard:
- rules:
- main:
- list-mode: lax
- allow:
- - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
- - github.com/hashicorp/terraform-plugin-framework
- - github.com/hashicorp/terraform-plugin-log
- - github.com/stackitcloud/stackit-sdk-go
- deny:
- - pkg: github.com/stretchr/testify
- desc: Do not use a testing framework
- gocritic:
- disabled-checks:
- - wrapperFunc
- - typeDefFirst
- - ifElseChain
- - dupImport
- - hugeParam
- enabled-tags:
- - performance
- - style
- - experimental
- gosec:
- excludes:
- - G104
- - G102
- - G304
- - G307
- misspell:
- locale: US
- nakedret:
- max-func-lines: 0
- revive:
- severity: error
- rules:
- - name: errorf
- - name: context-as-argument
- - name: error-return
- - name: increment-decrement
- - name: indent-error-flow
- - name: superfluous-else
- - name: unused-parameter
- - name: unreachable-code
- - name: atomic
- - name: empty-lines
- - name: early-return
- exclusions:
- paths:
- - stackit-sdk-generator/
- - generated/
- - pkg_gen/
- generated: lax
- warn-unused: true
- # Excluding configuration per-path, per-linter, per-text and per-source.
- rules:
- # Exclude some linters from running on tests files.
- - path: _test\.go
- linters:
- - gochecknoinits
-formatters:
- enable:
- - gofmt
- - goimports
- settings:
- goimports:
- local-prefixes:
- - tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
\ No newline at end of file
diff --git a/internal/testutils/activateMocks.go b/internal/testutils/activateMocks.go
deleted file mode 100644
index c8f7dd05..00000000
--- a/internal/testutils/activateMocks.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package testutils
-
-import (
- "fmt"
- "net/http"
- "path/filepath"
- "regexp"
- "runtime"
- "strings"
-
- "github.com/jarcoal/httpmock"
-)
-
-func TestName() string {
- pc, _, _, _ := runtime.Caller(1)
- nameFull := runtime.FuncForPC(pc).Name()
- nameEnd := filepath.Ext(nameFull)
- name := strings.TrimPrefix(nameEnd, ".")
- return name
-}
-
-func ActivateEnvironmentHttpMocks() {
- httpmock.RegisterNoResponder(
- func(req *http.Request) (*http.Response, error) {
- return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
- },
- )
-
- httpmock.RegisterRegexpResponder(
- "GET",
- regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
- func(_ *http.Request) (*http.Response, error) {
- return httpmock.NewStringResponse(
- http.StatusOK,
- httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String(),
- ), nil
- },
- )
-}
diff --git a/internal/testutils/functions.go b/internal/testutils/functions.go
deleted file mode 100644
index 5b8f2970..00000000
--- a/internal/testutils/functions.go
+++ /dev/null
@@ -1,129 +0,0 @@
-package testutils
-
-import (
- "bytes"
- "fmt"
- "log"
- "os"
- "path"
- "path/filepath"
- "runtime"
- "strings"
- "testing"
- "text/template"
-)
-
-// GetHomeEnvVariableName Helper function to obtain the home directory on different systems.
-// Based on os.UserHomeDir().
-func GetHomeEnvVariableName() string {
- env := "HOME"
- switch runtime.GOOS {
- case "windows":
- env = "USERPROFILE"
- case "plan9":
- env = "home"
- }
- return env
-}
-
-// CreateTemporaryHome create temporary home and initialize the credentials file as well
-func CreateTemporaryHome(createValidCredentialsFile bool, t *testing.T) string {
- // create a temporary file
- tempHome, err := os.MkdirTemp("", "tempHome")
- if err != nil {
- t.Fatalf("Failed to create temporary home directory: %v", err)
- }
-
- // create credentials file in temp directory
- stackitFolder := path.Join(tempHome, ".stackit")
- if err := os.Mkdir(stackitFolder, 0o750); err != nil {
- t.Fatalf("Failed to create stackit folder: %v", err)
- }
-
- filePath := path.Join(stackitFolder, "credentials.json")
- file, err := os.Create(filePath)
- if err != nil {
- t.Fatalf("Failed to create credentials file: %v", err)
- }
- defer func() {
- if err := file.Close(); err != nil {
- t.Fatalf("Error while closing the file: %v", err)
- }
- }()
-
- // Define content, default = invalid token
- token := "foo_token"
- //if createValidCredentialsFile {
- // token = GetTestProjectServiceAccountJson("")
- //}
- if _, err = file.WriteString(token); err != nil {
- t.Fatalf("Error writing to file: %v", err)
- }
-
- return tempHome
-}
-
-// SetTemporaryHome Function to overwrite the home folder
-func SetTemporaryHome(tempHomePath string) {
- env := GetHomeEnvVariableName()
- if err := os.Setenv(env, tempHomePath); err != nil {
- fmt.Printf("Error setting temporary home directory %v", err)
- }
-}
-
-// CleanupTemporaryHome cleanup the temporary home and reset the environment variable
-func CleanupTemporaryHome(tempHomePath string, t *testing.T) {
- if err := os.RemoveAll(tempHomePath); err != nil {
- t.Fatalf("Error cleaning up temporary folder: %v", err)
- }
- originalHomeDir, err := os.UserHomeDir()
- if err != nil {
- t.Fatalf("Failed to restore home directory back to normal: %v", err)
- }
- // revert back to original home folder
- env := GetHomeEnvVariableName()
- if err := os.Setenv(env, originalHomeDir); err != nil {
- fmt.Printf("Error resetting temporary home directory %v", err)
- }
-}
-
-func ucFirst(s string) string {
- if s == "" {
- return ""
- }
- return strings.ToUpper(s[:1]) + s[1:]
-}
-
-func StringFromTemplateMust(tplFile string, data any) string {
- res, err := StringFromTemplate(tplFile, data)
- if err != nil {
- log.Fatalln(err)
- }
- return res
-}
-
-func StringFromTemplate(tplFile string, data any) (string, error) {
- fn := template.FuncMap{
- "ucfirst": ucFirst,
- }
-
- file := filepath.Base(tplFile)
-
- tmpl, err := template.New(file).Funcs(fn).ParseFiles(tplFile)
- if err != nil {
- return "", err
- }
-
- tplBuf := &bytes.Buffer{}
-
- err = tmpl.Execute(tplBuf, data)
- if err != nil {
- return "", err
- }
-
- return tplBuf.String(), nil
-}
-
-func ResStr(prefix, resource, name string) string {
- return fmt.Sprintf("%s_%s.%s", prefix, resource, name)
-}
diff --git a/internal/testutils/testutils.go b/internal/testutils/testutils.go
deleted file mode 100644
index 142efe13..00000000
--- a/internal/testutils/testutils.go
+++ /dev/null
@@ -1,220 +0,0 @@
-package testutils
-
-import (
- "fmt"
- "log"
- "log/slog"
- "os"
- "os/exec"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/providerserver"
- "github.com/hashicorp/terraform-plugin-go/tfprotov6"
- "github.com/hashicorp/terraform-plugin-testing/config"
- "github.com/hashicorp/terraform-plugin-testing/echoprovider"
- "github.com/joho/godotenv"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
-)
-
-const (
- // Default location of service account JSON
- serviceAccountFilePath = "service_account.json"
-)
-
-var (
- // TestAccProtoV6ProviderFactories is used to instantiate a provider during
- // acceptance testing. The factory function will be invoked for every Terraform
- // CLI command executed to create a provider server to which the CLI can
- // reattach.
- TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
- "stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
- }
-
- // TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during
- // acceptance testing. The factory function will be invoked for every Terraform
- // CLI command executed to create a provider server to which the CLI can
- // reattach.
- //
- // See the Terraform acceptance test documentation on ephemeral resources for more information:
- // https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources
- TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
- "stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
- "echo": echoprovider.NewProviderServer(),
- }
-
- // E2ETestsEnabled checks if end-to-end tests should be run.
- // It is enabled when the TF_ACC environment variable is set to "1".
- E2ETestsEnabled = os.Getenv("TF_ACC") == "1"
- // OrganizationId is the id of organization used for tests
- OrganizationId = os.Getenv("TF_ACC_ORGANIZATION_ID")
- // ProjectId is the id of project used for tests
- ProjectId = os.Getenv("TF_ACC_PROJECT_ID")
- Region = os.Getenv("TF_ACC_REGION")
- // ServiceAccountFile is the json file of the service account
- ServiceAccountFile = os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")
- // ServerId is the id of a server used for some tests
- ServerId = getenv("TF_ACC_SERVER_ID", "")
- // TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests
- TestProjectParentContainerID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID")
- // TestProjectParentUUID is the uuid of the parent resource under which projects are created as part of the resource-manager acceptance tests
- TestProjectParentUUID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_UUID")
- // TestProjectServiceAccountEmail is the e-mail of a service account with admin permissions on the organization under which projects are created as part of the resource-manager acceptance tests
- TestProjectServiceAccountEmail = os.Getenv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL")
- // TestProjectUserEmail is the e-mail of a user for the project created as part of the resource-manager acceptance tests
- // Default email: acc-test@sa.stackit.cloud
- TestProjectUserEmail = getenv("TF_ACC_TEST_PROJECT_USER_EMAIL", "acc-test@sa.stackit.cloud")
- // TestImageLocalFilePath is the local path to an image file used for image acceptance tests
- TestImageLocalFilePath = getenv("TF_ACC_TEST_IMAGE_LOCAL_FILE_PATH", "default")
-)
-
-func Setup() {
- root, err := getRoot()
- if err != nil {
- log.Fatalln(err)
- }
- err = godotenv.Load(fmt.Sprintf("%s/.env", *root))
- if err != nil {
- slog.Info("could not find .env file - not loading .env")
- return
- }
- slog.Info("loaded .env file", "path", *root)
-}
-
-func getRoot() (*string, error) {
- cmd := exec.Command("git", "rev-parse", "--show-toplevel")
- out, err := cmd.Output()
- if err != nil {
- return nil, err
- }
- lines := strings.Split(string(out), "\n")
- return &lines[0], nil
-}
-
-func ResourceNameWithDateTime(name string) string {
- dateTime := time.Now().Format(time.RFC3339)
- // Remove timezone to have a smaller datetime
- dateTimeTrimmed, _, _ := strings.Cut(dateTime, "+")
- return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
-}
-
-//func GetTestProjectServiceAccountJson(path string) string {
-// var err error
-// json, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_JSON_CONTENT")
-// if !ok || json == "" {
-// json, err = readTestServiceAccountJsonFromFile(path)
-// if err != nil {
-// return ""
-// }
-// }
-// return json
-//}
-
-// func GetTestProjectServiceAccountToken(path string) string {
-// var err error
-// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
-// if !tokenSet || token == "" {
-// token, err = readTestTokenFromCredentialsFile(path)
-// if err != nil {
-// return ""
-// }
-// }
-// return token
-//}
-//
-// func readTestTokenFromCredentialsFile(path string) (string, error) {
-// if path == "" {
-// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH")
-// if !customPathSet || customPath == "" {
-// path = credentialsFilePath
-// home, err := os.UserHomeDir()
-// if err != nil {
-// return "", fmt.Errorf("getting home directory: %w", err)
-// }
-// path = filepath.Join(home, path)
-// } else {
-// path = customPath
-// }
-// }
-//
-// credentialsRaw, err := os.ReadFile(path)
-// if err != nil {
-// return "", fmt.Errorf("opening file: %w", err)
-// }
-//
-// var credentials struct {
-// TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"`
-// }
-// err = json.Unmarshal(credentialsRaw, &credentials)
-// if err != nil {
-// return "", fmt.Errorf("unmarshalling credentials: %w", err)
-// }
-// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
-//}
-
-//func readTestServiceAccountJsonFromFile(path string) (string, error) {
-// if path == "" {
-// customPath, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_FILE")
-// if !ok || customPath == "" {
-// path = serviceAccountFilePath
-// // TODO: check if we want to handle this with a home dir
-// /*
-// home, err := os.UserHomeDir()
-// if err != nil {
-// return "", fmt.Errorf("getting home directory: %w", err)
-// }
-// path = filepath.Join(home, path)
-// */
-// } else {
-// path = customPath
-// }
-// }
-//
-// credentialsRaw, err := os.ReadFile(path)
-// if err != nil {
-// return "", fmt.Errorf("opening file: %w", err)
-// }
-// return string(credentialsRaw), nil
-//}
-
-func getenv(key, defaultValue string) string {
- val := os.Getenv(key)
- if val == "" {
- return defaultValue
- }
- return val
-}
-
-// CreateDefaultLocalFile is a helper for local_file_path. No real data is created
-func CreateDefaultLocalFile() os.File {
- // Define the file name and size
- fileName := "test-512k.img"
- size := 512 * 1024 // 512 KB
-
- // Create the file
- file, err := os.Create(fileName)
- if err != nil {
- panic(err)
- }
-
- // Seek to the desired position (512 KB)
- _, err = file.Seek(int64(size), 0)
- if err != nil {
- panic(err)
- }
-
- return *file
-}
-
-func ConvertConfigVariable(variable config.Variable) string {
- tmpByteArray, _ := variable.MarshalJSON()
- // In case the variable is a string, the quotes should be removed
- if tmpByteArray[0] == '"' && tmpByteArray[len(tmpByteArray)-1] == '"' {
- result := string(tmpByteArray[1 : len(tmpByteArray)-1])
- // Replace escaped quotes which where added MarshalJSON
- rawString := strings.ReplaceAll(result, `\"`, `"`)
- return rawString
- }
- return string(tmpByteArray)
-}
diff --git a/main.go b/main.go
index ab603dd6..6d7793da 100644
--- a/main.go
+++ b/main.go
@@ -6,7 +6,6 @@ import (
"log"
"github.com/hashicorp/terraform-plugin-framework/providerserver"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
)
diff --git a/sample/postgres/postresql.tf b/sample/postgres/postresql.tf
index 531b17e2..fa2f49e8 100644
--- a/sample/postgres/postresql.tf
+++ b/sample/postgres/postresql.tf
@@ -65,15 +65,15 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "msh-sna-pe-example2
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser" {
project_id = var.project_id
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
- name = var.db_admin_username
- roles = ["createdb", "login", "login"]
+ username = var.db_admin_username
+ roles = ["createdb", "login"]
# roles = ["createdb", "login", "createrole"]
}
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" {
project_id = var.project_id
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example2.instance_id
- name = var.db_admin_username
+ username = var.db_admin_username
roles = ["createdb", "login"]
# roles = ["createdb", "login", "createrole"]
}
@@ -81,7 +81,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" {
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbuser" {
project_id = var.project_id
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
- name = var.db_name
+ username = var.db_username
roles = ["login"]
# roles = ["createdb", "login", "createrole"]
}
diff --git a/sample/sqlserver/flavor.tf b/sample/sqlserver/flavor.tf
index c491cc09..216c8f1e 100644
--- a/sample/sqlserver/flavor.tf
+++ b/sample/sqlserver/flavor.tf
@@ -1,5 +1,5 @@
-data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" {
+data "stackitprivatepreview_sqlserverflexalpha_flavor" "sqlserver_flavor" {
project_id = var.project_id
region = "eu01"
cpu = 4
@@ -9,5 +9,5 @@ data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" {
}
output "sqlserver_flavor" {
- value = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id
+ value = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
}
diff --git a/sample/sqlserver/sqlserver.tf b/sample/sqlserver/sqlserver.tf
index d18f499c..365a2005 100644
--- a/sample/sqlserver/sqlserver.tf
+++ b/sample/sqlserver/sqlserver.tf
@@ -18,15 +18,15 @@
# value = stackit_kms_key.key.key_id
# }
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
+resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" {
project_id = var.project_id
- name = "msh-beta-sna-001"
+ name = "msh-sna-001"
backup_schedule = "0 3 * * *"
retention_days = 31
- flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id
+ flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = {
class = "premium-perf2-stackit"
- size = 10
+ size = 50
}
version = 2022
encryption = {
@@ -34,12 +34,10 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
#keyring_id = stackit_kms_keyring.keyring.keyring_id
#key_version = 1
# key with scope public
- # kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
- kek_key_id = "c6878f92-ce55-4b79-8236-ba9d001d7967" # msh-k-001
+ key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
# key_id = var.key_id
- # kek_key_ring_id = var.keyring_id
- kek_key_ring_id = "0dea3f5f-9947-4dda-a9d3-18418832cefe" # msh-kr-sna01
- kek_key_version = var.key_version
+ keyring_id = var.keyring_id
+ key_version = var.key_version
service_account = var.sa_email
}
network = {
@@ -48,16 +46,55 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
}
}
-resource "stackitprivatepreview_sqlserverflexbeta_user" "betauser" {
- project_id = var.project_id
- instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id
- username = "betauser"
- roles = ["##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"]
+resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-nosna-001" {
+ project_id = var.project_id
+ name = "msh-nosna-001"
+ backup_schedule = "0 3 * * *"
+ retention_days = 31
+ flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
+ storage = {
+ class = "premium-perf2-stackit"
+ size = 50
+ }
+ version = 2022
+ # encryption = {
+ # #key_id = stackit_kms_key.key.key_id
+ # #keyring_id = stackit_kms_keyring.keyring.keyring_id
+ # #key_version = 1
+ # #key_id = var.key_id
+ # # key with scope public
+ # key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
+ # keyring_id = var.keyring_id
+ # key_version = var.key_version
+ # service_account = var.sa_email
+ # }
+ network = {
+ acl = ["0.0.0.0/0", "193.148.160.0/19"]
+ access_scope = "PUBLIC"
+ }
}
-resource "stackitprivatepreview_sqlserverflexbeta_database" "betadb" {
- project_id = var.project_id
- instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id
- name = "mshtest002"
- owner = stackitprivatepreview_sqlserverflexbeta_user.betauser.username
-}
+# data "stackitprivatepreview_sqlserverflexalpha_instance" "test" {
+# project_id = var.project_id
+# instance_id = var.instance_id
+# region = "eu01"
+# }
+
+# output "test" {
+# value = data.stackitprivatepreview_sqlserverflexalpha_instance.test
+# }
+
+# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbadminuser" {
+# project_id = var.project_id
+# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id
+# username = var.db_admin_username
+# roles = ["##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"]
+# }
+
+# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbuser" {
+# project_id = var.project_id
+# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id
+# username = var.db_username
+# roles = ["##STACKIT_LoginManager##"]
+# }
+
diff --git a/scripts/lint-golangci-lint.sh b/scripts/lint-golangci-lint.sh
new file mode 100755
index 00000000..0a883589
--- /dev/null
+++ b/scripts/lint-golangci-lint.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+# This script lints the SDK modules and the internal examples
+# Pre-requisites: golangci-lint
+set -eo pipefail
+
+ROOT_DIR=$(git rev-parse --show-toplevel)
+GOLANG_CI_YAML_PATH="${ROOT_DIR}/golang-ci.yaml"
+GOLANG_CI_ARGS="--allow-parallel-runners --timeout=5m --config=${GOLANG_CI_YAML_PATH}"
+
+if type -p golangci-lint >/dev/null; then
+ :
+else
+ echo "golangci-lint not installed, unable to proceed."
+ exit 1
+fi
+
+cd ${ROOT_DIR}
+golangci-lint run ${GOLANG_CI_ARGS}
diff --git a/scripts/project.sh b/scripts/project.sh
index 68585774..1d570c6a 100755
--- a/scripts/project.sh
+++ b/scripts/project.sh
@@ -17,7 +17,11 @@ elif [ "$action" = "tools" ]; then
go mod download
- go install golang.org/x/tools/cmd/goimports@v0.42.0
+ # go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.0
+ go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
+
+ # go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.21.0
+ go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
else
echo "Invalid action: '$action', please use $0 help for help"
fi
diff --git a/scripts/tfplugindocs.sh b/scripts/tfplugindocs.sh
index e77b6a98..6f9d5d1b 100755
--- a/scripts/tfplugindocs.sh
+++ b/scripts/tfplugindocs.sh
@@ -14,5 +14,5 @@ fi
mkdir -p ${ROOT_DIR}/docs
echo ">> Generating documentation"
-go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate \
+tfplugindocs generate \
--provider-name "stackitprivatepreview"
diff --git a/service_specs/postgres-flex/generator_settings.yml b/service_specs/postgres-flex/generator_settings.yml
deleted file mode 100644
index 8e8af766..00000000
--- a/service_specs/postgres-flex/generator_settings.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-versions:
- - name: alpha
- path: v3alpha1
diff --git a/service_specs/postgres-flex/alpha/database_config.yml b/service_specs/postgres-flex_database_config.yml
similarity index 83%
rename from service_specs/postgres-flex/alpha/database_config.yml
rename to service_specs/postgres-flex_database_config.yml
index c7bb4e29..8211b1a7 100644
--- a/service_specs/postgres-flex/alpha/database_config.yml
+++ b/service_specs/postgres-flex_database_config.yml
@@ -1,3 +1,4 @@
+
provider:
name: stackitprivatepreview
@@ -17,11 +18,6 @@ resources:
method: DELETE
data_sources:
- database:
- read:
- path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseId}
- method: GET
-
databases:
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
diff --git a/service_specs/postgres-flex/alpha/flavors_config.yml b/service_specs/postgres-flex_flavors_config.yml
similarity index 100%
rename from service_specs/postgres-flex/alpha/flavors_config.yml
rename to service_specs/postgres-flex_flavors_config.yml
diff --git a/service_specs/postgres-flex/alpha/instance_config.yml b/service_specs/postgres-flex_instance_config.yml
similarity index 100%
rename from service_specs/postgres-flex/alpha/instance_config.yml
rename to service_specs/postgres-flex_instance_config.yml
diff --git a/service_specs/postgres-flex/alpha/role_config.yml b/service_specs/postgres-flex_role_config.yml
similarity index 100%
rename from service_specs/postgres-flex/alpha/role_config.yml
rename to service_specs/postgres-flex_role_config.yml
diff --git a/service_specs/postgres-flex/alpha/user_config.yml b/service_specs/postgres-flex_user_config.yml
similarity index 100%
rename from service_specs/postgres-flex/alpha/user_config.yml
rename to service_specs/postgres-flex_user_config.yml
diff --git a/service_specs/postgres-flex/alpha/version_config.yml b/service_specs/postgres-flex_version_config.yml
similarity index 100%
rename from service_specs/postgres-flex/alpha/version_config.yml
rename to service_specs/postgres-flex_version_config.yml
diff --git a/service_specs/sqlserverflex/beta/backup_config.yml.disabled b/service_specs/sqlserverflex/beta/backup_config.yml.disabled
deleted file mode 100644
index 7df5fc4b..00000000
--- a/service_specs/sqlserverflex/beta/backup_config.yml.disabled
+++ /dev/null
@@ -1,13 +0,0 @@
-provider:
- name: stackitprivatepreview
-
-data_sources:
- backups:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/backups
- method: GET
-
- backup:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/backups/{backupId}
- method: GET
diff --git a/service_specs/sqlserverflex/beta/collation_config.yml.disabled b/service_specs/sqlserverflex/beta/collation_config.yml.disabled
deleted file mode 100644
index d1160ec3..00000000
--- a/service_specs/sqlserverflex/beta/collation_config.yml.disabled
+++ /dev/null
@@ -1,8 +0,0 @@
-provider:
- name: stackitprivatepreview
-
-data_sources:
- collation:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations
- method: GET
diff --git a/service_specs/sqlserverflex/beta/database_config.yml b/service_specs/sqlserverflex/beta/database_config.yml
deleted file mode 100644
index 135010d2..00000000
--- a/service_specs/sqlserverflex/beta/database_config.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-provider:
- name: stackitprivatepreview
-
-resources:
- database:
- schema:
- attributes:
- aliases:
- databaseId: id
- create:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
- method: POST
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
- method: GET
- delete:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
- method: DELETE
-
-data_sources:
- databases:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
- method: GET
-
- database:
- schema:
- attributes:
- aliases:
- databaseId: id
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
- method: GET
diff --git a/service_specs/sqlserverflex/beta/flavors_config.yml b/service_specs/sqlserverflex/beta/flavors_config.yml
deleted file mode 100644
index 4b985a4c..00000000
--- a/service_specs/sqlserverflex/beta/flavors_config.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-provider:
- name: stackitprivatepreview
-
-data_sources:
- flavors:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/flavors
- method: GET
diff --git a/service_specs/sqlserverflex/beta/instance_config.yml b/service_specs/sqlserverflex/beta/instance_config.yml
deleted file mode 100644
index cea25959..00000000
--- a/service_specs/sqlserverflex/beta/instance_config.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-provider:
- name: stackitprivatepreview
-
-resources:
- instance:
- create:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances
- method: POST
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
- method: GET
- update:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
- method: PUT
- delete:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
- method: DELETE
-
-data_sources:
- instances:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances
- method: GET
-
- instance:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}
- method: GET
diff --git a/service_specs/sqlserverflex/beta/user_config.yml b/service_specs/sqlserverflex/beta/user_config.yml
deleted file mode 100644
index bfa9a3a7..00000000
--- a/service_specs/sqlserverflex/beta/user_config.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-
-provider:
- name: stackitprivatepreview
-
-resources:
- user:
- create:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users
- method: POST
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId}
- method: GET
- update:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId}
- method: PUT
- delete:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users/{userId}
- method: DELETE
-
-data_sources:
- user:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/users
- method: GET
diff --git a/service_specs/sqlserverflex/beta/versions_config.yml b/service_specs/sqlserverflex/beta/versions_config.yml
deleted file mode 100644
index 70d79676..00000000
--- a/service_specs/sqlserverflex/beta/versions_config.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-provider:
- name: stackitprivatepreview
-
-data_sources:
- version:
- read:
- path: /v3beta1/projects/{projectId}/regions/{region}/versions
- method: GET
diff --git a/service_specs/sqlserverflex/generator_settings.yml b/service_specs/sqlserverflex/generator_settings.yml
deleted file mode 100644
index 1f92f640..00000000
--- a/service_specs/sqlserverflex/generator_settings.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-versions:
- - name: alpha
- path: v3alpha1
- - name: beta
- path: v3beta1
diff --git a/service_specs/sqlserverflex/alpha/backup_config.yml.disabled b/service_specs/sqlserverflex_backup_config.yml.disabled
similarity index 100%
rename from service_specs/sqlserverflex/alpha/backup_config.yml.disabled
rename to service_specs/sqlserverflex_backup_config.yml.disabled
diff --git a/service_specs/sqlserverflex/alpha/collation_config.yml.bak b/service_specs/sqlserverflex_collation_config.yml.disabled
similarity index 92%
rename from service_specs/sqlserverflex/alpha/collation_config.yml.bak
rename to service_specs/sqlserverflex_collation_config.yml.disabled
index 9ebfe5b4..9cb13c19 100644
--- a/service_specs/sqlserverflex/alpha/collation_config.yml.bak
+++ b/service_specs/sqlserverflex_collation_config.yml.disabled
@@ -2,7 +2,7 @@ provider:
name: stackitprivatepreview
data_sources:
- collations:
+ collation:
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations
method: GET
diff --git a/service_specs/sqlserverflex/alpha/database_config.yml b/service_specs/sqlserverflex_database_config.yml
similarity index 92%
rename from service_specs/sqlserverflex/alpha/database_config.yml
rename to service_specs/sqlserverflex_database_config.yml
index cd592e80..e8ea6ef9 100644
--- a/service_specs/sqlserverflex/alpha/database_config.yml
+++ b/service_specs/sqlserverflex_database_config.yml
@@ -1,8 +1,13 @@
+
provider:
name: stackitprivatepreview
resources:
database:
+ schema:
+ attributes:
+ aliases:
+ id: database_id
create:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases
method: POST
@@ -12,10 +17,6 @@ resources:
delete:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
method: DELETE
- schema:
- attributes:
- aliases:
- id: databaseId
data_sources:
@@ -25,10 +26,9 @@ data_sources:
method: GET
database:
+ attributes:
+ aliases:
+ id: database_id
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}/databases/{databaseName}
method: GET
- schema:
- attributes:
- aliases:
- id: database_id
diff --git a/service_specs/sqlserverflex/alpha/flavors_config.yml b/service_specs/sqlserverflex_flavors_config.yml
similarity index 100%
rename from service_specs/sqlserverflex/alpha/flavors_config.yml
rename to service_specs/sqlserverflex_flavors_config.yml
diff --git a/service_specs/sqlserverflex/alpha/instance_config.yml b/service_specs/sqlserverflex_instance_config.yml
similarity index 85%
rename from service_specs/sqlserverflex/alpha/instance_config.yml
rename to service_specs/sqlserverflex_instance_config.yml
index c7ae4c2a..bef39890 100644
--- a/service_specs/sqlserverflex/alpha/instance_config.yml
+++ b/service_specs/sqlserverflex_instance_config.yml
@@ -1,3 +1,4 @@
+
provider:
name: stackitprivatepreview
@@ -17,11 +18,6 @@ resources:
method: DELETE
data_sources:
- instances:
- read:
- path: /v3alpha1/projects/{projectId}/regions/{region}/instances
- method: GET
-
instance:
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/instances/{instanceId}
diff --git a/service_specs/sqlserverflex/alpha/user_config.yml b/service_specs/sqlserverflex_user_config.yml
similarity index 100%
rename from service_specs/sqlserverflex/alpha/user_config.yml
rename to service_specs/sqlserverflex_user_config.yml
diff --git a/service_specs/sqlserverflex/alpha/version_config.yml.bak b/service_specs/sqlserverflex_version_config.yml
similarity index 92%
rename from service_specs/sqlserverflex/alpha/version_config.yml.bak
rename to service_specs/sqlserverflex_version_config.yml
index 937dccd5..3a3f982d 100644
--- a/service_specs/sqlserverflex/alpha/version_config.yml.bak
+++ b/service_specs/sqlserverflex_version_config.yml
@@ -3,7 +3,7 @@ provider:
name: stackitprivatepreview
data_sources:
- versions:
+ version:
read:
path: /v3alpha1/projects/{projectId}/regions/{region}/versions
method: GET
diff --git a/stackit/internal/conversion/conversion.go b/stackit/internal/conversion/conversion.go
index 48871213..cd4c3bfa 100644
--- a/stackit/internal/conversion/conversion.go
+++ b/stackit/internal/conversion/conversion.go
@@ -11,7 +11,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/conversion/conversion_test.go b/stackit/internal/conversion/conversion_test.go
index ac5f4535..5e6c2445 100644
--- a/stackit/internal/conversion/conversion_test.go
+++ b/stackit/internal/conversion/conversion_test.go
@@ -8,7 +8,6 @@ import (
"testing"
"github.com/hashicorp/terraform-plugin-framework/diag"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"github.com/google/go-cmp/cmp"
diff --git a/stackit/internal/core/core.go b/stackit/internal/core/core.go
index 3680ae65..d3ea252c 100644
--- a/stackit/internal/core/core.go
+++ b/stackit/internal/core/core.go
@@ -32,7 +32,7 @@ const (
type EphemeralProviderData struct {
ProviderData
- PrivateKey string //nolint:gosec //this is a placeholder and not used in this code
+ PrivateKey string
PrivateKeyPath string
ServiceAccountKey string
ServiceAccountKeyPath string
@@ -105,13 +105,11 @@ func DiagsToError(diags diag.Diagnostics) error {
diagsError := diags.Errors()
diagsStrings := make([]string, 0)
for _, diagnostic := range diagsError {
- diagsStrings = append(
- diagsStrings, fmt.Sprintf(
- "(%s) %s",
- diagnostic.Summary(),
- diagnostic.Detail(),
- ),
- )
+ diagsStrings = append(diagsStrings, fmt.Sprintf(
+ "(%s) %s",
+ diagnostic.Summary(),
+ diagnostic.Detail(),
+ ))
}
return fmt.Errorf("%s", strings.Join(diagsStrings, ";"))
}
@@ -138,22 +136,14 @@ func LogAndAddWarning(ctx context.Context, diags *diag.Diagnostics, summary, det
func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name)
- warnContent := fmt.Sprintf(
- "The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.",
- resourceType,
- name,
- )
+ warnContent := fmt.Sprintf("The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", resourceType, name)
tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent))
diags.AddWarning(warnTitle, warnContent)
}
func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name)
- errContent := fmt.Sprintf(
- `The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`,
- resourceType,
- name,
- )
+ errContent := fmt.Sprintf(`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, resourceType, name)
tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent))
diags.AddError(errTitle, errContent)
}
@@ -171,10 +161,8 @@ func LogResponse(ctx context.Context) context.Context {
traceId := runtime.GetTraceId(ctx)
ctx = tflog.SetField(ctx, "x-trace-id", traceId)
- tflog.Info(
- ctx, "response data", map[string]interface{}{
- "x-trace-id": traceId,
- },
- )
+ tflog.Info(ctx, "response data", map[string]interface{}{
+ "x-trace-id": traceId,
+ })
return ctx
}
diff --git a/stackit/internal/core/retry_round_tripper.go b/stackit/internal/core/retry_round_tripper.go
deleted file mode 100644
index 568be431..00000000
--- a/stackit/internal/core/retry_round_tripper.go
+++ /dev/null
@@ -1,237 +0,0 @@
-package core
-
-import (
- "context"
- "crypto/rand"
- "errors"
- "fmt"
- "math/big"
- "net/http"
- "time"
-
- "github.com/hashicorp/terraform-plugin-log/tflog"
-)
-
-const (
- // backoffMultiplier is the factor by which the delay is multiplied for exponential backoff.
- backoffMultiplier = 2
- // jitterFactor is the divisor used to calculate jitter (e.g., half of the base delay).
- jitterFactor = 2
-)
-
-var (
- // ErrRequestFailedAfterRetries is returned when a request fails after all retry attempts.
- ErrRequestFailedAfterRetries = errors.New("request failed after all retry attempts")
-)
-
-// RetryRoundTripper implements an http.RoundTripper that adds automatic retry logic for failed requests.
-type RetryRoundTripper struct {
- next http.RoundTripper
- maxRetries int
- initialDelay time.Duration
- maxDelay time.Duration
- perTryTimeout time.Duration
-}
-
-// NewRetryRoundTripper creates a new instance of the RetryRoundTripper with the specified configuration.
-func NewRetryRoundTripper(
- next http.RoundTripper,
- maxRetries int,
- initialDelay, maxDelay, perTryTimeout time.Duration,
-) *RetryRoundTripper {
- return &RetryRoundTripper{
- next: next,
- maxRetries: maxRetries,
- initialDelay: initialDelay,
- maxDelay: maxDelay,
- perTryTimeout: perTryTimeout,
- }
-}
-
-// RoundTrip executes the request and retries on failure.
-func (rrt *RetryRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
- resp, err := rrt.executeRequest(req)
- if !rrt.shouldRetry(resp, err) {
- if err != nil {
- return resp, fmt.Errorf("initial request failed, not retrying: %w", err)
- }
-
- return resp, nil
- }
-
- return rrt.retryLoop(req, resp, err)
-}
-
-// executeRequest performs a single HTTP request with a per-try timeout.
-func (rrt *RetryRoundTripper) executeRequest(req *http.Request) (*http.Response, error) {
- ctx, cancel := context.WithTimeout(req.Context(), rrt.perTryTimeout)
- defer cancel()
-
- resp, err := rrt.next.RoundTrip(req.WithContext(ctx))
- if err != nil {
- if errors.Is(err, context.DeadlineExceeded) {
- return resp, fmt.Errorf("per-try timeout of %v exceeded: %w", rrt.perTryTimeout, err)
- }
-
- return resp, fmt.Errorf("http roundtrip failed: %w", err)
- }
-
- return resp, nil
-}
-
-// retryLoop handles the retry logic for a failed request.
-func (rrt *RetryRoundTripper) retryLoop(
- req *http.Request,
- initialResp *http.Response,
- initialErr error,
-) (*http.Response, error) {
- var (
- lastErr = initialErr
- resp = initialResp
- currentDelay = rrt.initialDelay
- )
-
- ctx := req.Context()
-
- for attempt := 1; attempt <= rrt.maxRetries; attempt++ {
- rrt.logRetryAttempt(ctx, attempt, currentDelay, lastErr)
-
- waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay)
- if err := rrt.waitForDelay(ctx, waitDuration); err != nil {
- return nil, err // Context was canceled during wait.
- }
-
- // Exponential backoff for the next potential retry.
- currentDelay = rrt.updateCurrentDelay(currentDelay)
-
- // Retry attempt.
- resp, lastErr = rrt.executeRequest(req)
- if !rrt.shouldRetry(resp, lastErr) {
- if lastErr != nil {
- return resp, fmt.Errorf("request failed on retry attempt %d: %w", attempt, lastErr)
- }
-
- return resp, nil
- }
- }
-
- return nil, rrt.handleFinalError(ctx, resp, lastErr)
-}
-
-// logRetryAttempt logs the details of a retry attempt.
-func (rrt *RetryRoundTripper) logRetryAttempt(
- ctx context.Context,
- attempt int,
- delay time.Duration,
- err error,
-) {
- tflog.Info(
- ctx, "Request failed, retrying...", map[string]interface{}{
- "attempt": attempt,
- "max_attempts": rrt.maxRetries,
- "delay": delay,
- "error": err,
- },
- )
-}
-
-// updateCurrentDelay calculates the next delay for exponential backoff.
-func (rrt *RetryRoundTripper) updateCurrentDelay(currentDelay time.Duration) time.Duration {
- currentDelay *= backoffMultiplier
- if currentDelay > rrt.maxDelay {
- return rrt.maxDelay
- }
-
- return currentDelay
-}
-
-// handleFinalError constructs and returns the final error after all retries have been exhausted.
-func (rrt *RetryRoundTripper) handleFinalError(
- ctx context.Context,
- resp *http.Response,
- lastErr error,
-) error {
- if resp != nil {
- if err := resp.Body.Close(); err != nil {
- tflog.Warn(
- ctx, "Failed to close response body", map[string]interface{}{
- "error": err.Error(),
- },
- )
- }
- }
-
- if lastErr != nil {
- return fmt.Errorf("%w: %w", ErrRequestFailedAfterRetries, lastErr)
- }
-
- // This case occurs if shouldRetry was true due to a retryable status code,
- // but all retries failed with similar status codes.
- if resp != nil {
- return fmt.Errorf(
- "%w: last retry attempt failed with status code %d",
- ErrRequestFailedAfterRetries,
- resp.StatusCode,
- )
- }
-
- return fmt.Errorf("%w: no response received", ErrRequestFailedAfterRetries)
-}
-
-// shouldRetry determines if a request should be retried based on the response or an error.
-func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool {
- if err != nil {
- return true
- }
-
- if resp != nil {
- if resp.StatusCode == http.StatusBadGateway ||
- resp.StatusCode == http.StatusServiceUnavailable ||
- resp.StatusCode == http.StatusGatewayTimeout {
- return true
- }
- }
-
- return false
-}
-
-// calculateWaitDurationWithJitter calculates the backoff duration for the next retry,
-// adding a random jitter to prevent thundering herd issues.
-func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter(
- ctx context.Context,
- baseDelay time.Duration,
-) time.Duration {
- if baseDelay <= 0 {
- return 0
- }
-
- maxJitter := int64(baseDelay / jitterFactor)
- if maxJitter <= 0 {
- return baseDelay
- }
-
- random, err := rand.Int(rand.Reader, big.NewInt(maxJitter))
- if err != nil {
- tflog.Warn(
- ctx, "Failed to generate random jitter, proceeding without it.", map[string]interface{}{
- "error": err.Error(),
- },
- )
-
- return baseDelay
- }
-
- jitter := time.Duration(random.Int64())
-
- return baseDelay + jitter
-}
-
-// waitForDelay pauses execution for a given duration or until the context is canceled.
-func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error {
- select {
- case <-ctx.Done():
- return fmt.Errorf("context canceled during backoff wait: %w", ctx.Err())
- case <-time.After(delay):
- return nil
- }
-}
diff --git a/stackit/internal/core/retry_round_tripper_test.go b/stackit/internal/core/retry_round_tripper_test.go
deleted file mode 100644
index ac84db8b..00000000
--- a/stackit/internal/core/retry_round_tripper_test.go
+++ /dev/null
@@ -1,252 +0,0 @@
-package core
-
-import (
- "context"
- "errors"
- "io"
- "net/http"
- "net/http/httptest"
- "strings"
- "sync/atomic"
- "testing"
- "time"
-)
-
-type mockRoundTripper struct {
- roundTripFunc func(req *http.Request) (*http.Response, error)
- callCount int32
-}
-
-func (m *mockRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
- atomic.AddInt32(&m.callCount, 1)
-
- return m.roundTripFunc(req)
-}
-
-func (m *mockRoundTripper) CallCount() int32 {
- return atomic.LoadInt32(&m.callCount)
-}
-
-func TestRetryRoundTripper_RoundTrip(t *testing.T) {
- t.Parallel()
-
- testRetryConfig := func(next http.RoundTripper) *RetryRoundTripper {
- return NewRetryRoundTripper(
- next,
- 3,
- 1*time.Millisecond,
- 10*time.Millisecond,
- 50*time.Millisecond,
- )
- }
-
- noRetryTests := []struct {
- name string
- mockStatusCode int
- expectedStatusCode int
- }{
- {
- name: "should succeed on the first try",
- mockStatusCode: http.StatusOK,
- expectedStatusCode: http.StatusOK,
- },
- {
- name: "should not retry on a non-retryable status code like 400",
- mockStatusCode: http.StatusBadRequest,
- expectedStatusCode: http.StatusBadRequest,
- },
- }
-
- for _, testCase := range noRetryTests {
- t.Run(
- testCase.name, func(t *testing.T) {
- t.Parallel()
-
- mock := &mockRoundTripper{
- roundTripFunc: func(req *http.Request) (*http.Response, error) {
- return &http.Response{
- StatusCode: testCase.mockStatusCode,
- Body: io.NopCloser(nil),
- Request: req,
- }, nil
- },
- }
- tripper := testRetryConfig(mock)
- req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
-
- resp, err := tripper.RoundTrip(req)
- if resp != nil {
- defer func() {
- if closeErr := resp.Body.Close(); closeErr != nil {
- t.Errorf("failed to close response body: %v", closeErr)
- }
- }()
- }
-
- if err != nil {
- t.Fatalf("expected no error, got %v", err)
- }
- if resp.StatusCode != testCase.expectedStatusCode {
- t.Fatalf("expected status code %d, got %d", testCase.expectedStatusCode, resp.StatusCode)
- }
- if mock.CallCount() != 1 {
- t.Fatalf("expected 1 call, got %d", mock.CallCount())
- }
- },
- )
- }
-
- t.Run(
- "should retry on retryable status code (503) and eventually fail", func(t *testing.T) {
- t.Parallel()
-
- mock := &mockRoundTripper{
- roundTripFunc: func(req *http.Request) (*http.Response, error) {
- return &http.Response{
- StatusCode: http.StatusServiceUnavailable,
- Body: io.NopCloser(nil),
- Request: req,
- }, nil
- },
- }
- tripper := testRetryConfig(mock)
- req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
-
- resp, err := tripper.RoundTrip(req)
- if resp != nil {
- defer func() {
- if closeErr := resp.Body.Close(); closeErr != nil {
- t.Errorf("failed to close response body: %v", closeErr)
- }
- }()
- }
-
- if err == nil {
- t.Fatal("expected an error, but got nil")
- }
- expectedErrorMsg := "last retry attempt failed with status code 503"
- if !strings.Contains(err.Error(), expectedErrorMsg) {
- t.Fatalf("expected error to contain %q, got %q", expectedErrorMsg, err.Error())
- }
- if mock.CallCount() != 4 { // 1 initial + 3 retries
- t.Fatalf("expected 4 calls, got %d", mock.CallCount())
- }
- },
- )
-
- t.Run(
- "should succeed after one retry", func(t *testing.T) {
- t.Parallel()
-
- mock := &mockRoundTripper{}
- mock.roundTripFunc = func(req *http.Request) (*http.Response, error) {
- if mock.CallCount() < 2 {
- return &http.Response{
- StatusCode: http.StatusServiceUnavailable,
- Body: io.NopCloser(nil),
- Request: req,
- }, nil
- }
-
- return &http.Response{
- StatusCode: http.StatusOK,
- Body: io.NopCloser(nil),
- Request: req,
- }, nil
- }
- tripper := testRetryConfig(mock)
- req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
-
- resp, err := tripper.RoundTrip(req)
- if resp != nil {
- defer func() {
- if closeErr := resp.Body.Close(); closeErr != nil {
- t.Errorf("failed to close response body: %v", closeErr)
- }
- }()
- }
-
- if err != nil {
- t.Fatalf("expected no error, got %v", err)
- }
- if resp.StatusCode != http.StatusOK {
- t.Fatalf("expected status code %d, got %d", http.StatusOK, resp.StatusCode)
- }
- if mock.CallCount() != 2 {
- t.Fatalf("expected 2 calls, got %d", mock.CallCount())
- }
- },
- )
-
- t.Run(
- "should retry on network error", func(t *testing.T) {
- t.Parallel()
-
- mockErr := errors.New("simulated network error")
-
- mock := &mockRoundTripper{
- roundTripFunc: func(_ *http.Request) (*http.Response, error) {
- return nil, mockErr
- },
- }
- tripper := testRetryConfig(mock)
- req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
-
- resp, err := tripper.RoundTrip(req)
- if resp != nil {
- defer func() {
- if closeErr := resp.Body.Close(); closeErr != nil {
- t.Errorf("failed to close response body: %v", closeErr)
- }
- }()
- }
-
- if !errors.Is(err, mockErr) {
- t.Fatalf("expected error to be %v, got %v", mockErr, err)
- }
- if mock.CallCount() != 4 { // 1 initial + 3 retries
- t.Fatalf("expected 4 calls, got %d", mock.CallCount())
- }
- },
- )
-
- t.Run(
- "should abort retries if the main context is canceled", func(t *testing.T) {
- t.Parallel()
-
- mock := &mockRoundTripper{
- roundTripFunc: func(req *http.Request) (*http.Response, error) {
- select {
- case <-time.After(100 * time.Millisecond):
- return nil, errors.New("this should not be returned")
- case <-req.Context().Done():
- return nil, req.Context().Err()
- }
- },
- }
- tripper := testRetryConfig(mock)
- baseCtx := context.Background()
-
- ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond)
- defer cancel()
-
- req := httptest.NewRequest(http.MethodGet, "/", http.NoBody).WithContext(ctx)
-
- resp, err := tripper.RoundTrip(req)
- if resp != nil {
- defer func() {
- if closeErr := resp.Body.Close(); closeErr != nil {
- t.Errorf("failed to close response body: %v", closeErr)
- }
- }()
- }
-
- if !errors.Is(err, context.DeadlineExceeded) {
- t.Fatalf("expected error to be context.DeadlineExceeded, got %v", err)
- }
- if mock.CallCount() != 1 {
- t.Fatalf("expected 1 call, got %d", mock.CallCount())
- }
- },
- )
-}
diff --git a/stackit/internal/features/beta.go b/stackit/internal/features/beta.go
index ab74e554..781ac8c0 100644
--- a/stackit/internal/features/beta.go
+++ b/stackit/internal/features/beta.go
@@ -9,7 +9,6 @@ import (
"strings"
"github.com/hashicorp/terraform-plugin-framework/diag"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/features/beta_test.go b/stackit/internal/features/beta_test.go
index 366158f8..83fb2f99 100644
--- a/stackit/internal/features/beta_test.go
+++ b/stackit/internal/features/beta_test.go
@@ -7,7 +7,6 @@ import (
"testing"
"github.com/hashicorp/terraform-plugin-framework/diag"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/features/experiments.go b/stackit/internal/features/experiments.go
index 2230a7b5..b68399ed 100644
--- a/stackit/internal/features/experiments.go
+++ b/stackit/internal/features/experiments.go
@@ -10,7 +10,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/features/experiments_test.go b/stackit/internal/features/experiments_test.go
index 771a8444..06423a4f 100644
--- a/stackit/internal/features/experiments_test.go
+++ b/stackit/internal/features/experiments_test.go
@@ -7,7 +7,6 @@ import (
"testing"
"github.com/hashicorp/terraform-plugin-framework/diag"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/services/postgresflexalpha/database/datasource.go b/stackit/internal/services/postgresflexalpha/database/datasource.go
index 9a0af3cd..36fc5333 100644
--- a/stackit/internal/services/postgresflexalpha/database/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/database/datasource.go
@@ -5,19 +5,19 @@ import (
"fmt"
"net/http"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
-
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- pgDsGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
// Ensure the implementation satisfies the expected interfaces.
@@ -30,15 +30,9 @@ func NewDatabaseDataSource() datasource.DataSource {
return &databaseDataSource{}
}
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- pgDsGen.DatabaseModel
- TerraformID types.String `tfsdk:"id"`
-}
-
// databaseDataSource is the data source implementation.
type databaseDataSource struct {
- client *v3alpha1api.APIClient
+ client *postgresflexalpha.APIClient
providerData core.ProviderData
}
@@ -72,45 +66,132 @@ func (r *databaseDataSource) Configure(
}
// Schema defines the schema for the data source.
-func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- s := pgDsGen.DatabaseDataSourceSchema(ctx)
- s.Attributes["id"] = schema.StringAttribute{
- Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
- "`database_id`\\\".\",",
- Computed: true,
+func (r *databaseDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
+ "database_id": "Database ID.",
+ "instance_id": "ID of the Postgres Flex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "name": "Database name.",
+ "owner": "Username of the database owner.",
+ "region": "The resource region. If not defined, the provider region is used.",
}
- resp.Schema = s
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ },
+ "database_id": schema.Int64Attribute{
+ Description: descriptions["database_id"],
+ Optional: true,
+ Computed: true,
+ },
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "name": schema.StringAttribute{
+ Description: descriptions["name"],
+ Optional: true,
+ Computed: true,
+ Validators: []validator.String{
+ stringvalidator.LengthAtLeast(1),
+ },
+ },
+ "owner": schema.StringAttribute{
+ Description: descriptions["owner"],
+ Computed: true,
+ },
+ "region": schema.StringAttribute{
+ // the region cannot be determined automatically, so it has to be passed in
+ Optional: true,
+ Description: descriptions["region"],
+ },
+ },
+ }
}
-// Read fetches the data for the data source.
+// Read refreshes the Terraform state with the latest data.
func (r *databaseDataSource) Read(
ctx context.Context,
req datasource.ReadRequest,
resp *datasource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model dataSourceModel
+ var model Model
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // validation for exactly one of database_id or name
+ isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
+ isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
+
+ if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Invalid configuration", "Exactly one of 'database_id' or 'name' must be specified.",
+ )
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
+ databaseId := model.DatabaseId.ValueInt64()
region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
+ ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = tflog.SetField(ctx, "region", region)
- databaseResp, err := r.getDatabaseByNameOrID(ctx, &model, projectId, region, instanceId, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
+ var databaseResp *postgresflexalpha.ListDatabase
+ var err error
+
+ if isIdSet {
+ databaseId := model.DatabaseId.ValueInt64()
+ ctx = tflog.SetField(ctx, "database_id", databaseId)
+ databaseResp, err = getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
+ } else {
+ databaseName := model.Name.ValueString()
+ ctx = tflog.SetField(ctx, "name", databaseName)
+ databaseResp, err = getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
}
+
if err != nil {
- handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading database",
+ fmt.Sprintf(
+ "Database with ID %d or instance with ID %q does not exist in project %q.",
+ databaseId,
+ instanceId,
+ projectId,
+ ),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
resp.State.RemoveResource(ctx)
return
}
@@ -137,60 +218,3 @@ func (r *databaseDataSource) Read(
}
tflog.Info(ctx, "Postgres Flex database read")
}
-
-// getDatabaseByNameOrID retrieves a single database by ensuring either a unique ID or name is provided.
-func (r *databaseDataSource) getDatabaseByNameOrID(
- ctx context.Context,
- model *dataSourceModel,
- projectId, region, instanceId string,
- diags *diag.Diagnostics,
-) (*v3alpha1api.ListDatabase, error) {
- isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
- isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
-
- if (isIdSet && isNameSet) || (!isIdSet && !isNameSet) {
- diags.AddError(
- "Invalid configuration",
- "Exactly one of 'id' or 'name' must be specified.",
- )
- return nil, nil
- }
-
- if isIdSet {
- databaseId := model.DatabaseId.ValueInt64()
- ctx = tflog.SetField(ctx, "database_id", databaseId)
- return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId)
- }
-
- databaseName := model.Name.ValueString()
- ctx = tflog.SetField(ctx, "name", databaseName)
- return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName)
-}
-
-// handleReadError centralizes API error handling for the Read operation.
-func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) {
- utils.LogError(
- ctx,
- diags,
- err,
- "Reading database",
- fmt.Sprintf(
- "Could not retrieve database for instance %q in project %q.",
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusBadRequest: fmt.Sprintf(
- "Invalid request parameters for project %q and instance %q.",
- projectId,
- instanceId,
- ),
- http.StatusNotFound: fmt.Sprintf(
- "Database, instance %q, or project %q not found.",
- instanceId,
- projectId,
- ),
- http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
- },
- )
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
deleted file mode 100644
index d5683a6c..00000000
--- a/stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go
+++ /dev/null
@@ -1,69 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package postgresflexalpha
-
-import (
- "context"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "database_id": schema.Int64Attribute{
- Required: true,
- Description: "The ID of the database.",
- MarkdownDescription: "The ID of the database.",
- },
- "tf_original_api_id": schema.Int64Attribute{
- Computed: true,
- Description: "The id of the database.",
- MarkdownDescription: "The id of the database.",
- },
- "instance_id": schema.StringAttribute{
- Required: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the database.",
- MarkdownDescription: "The name of the database.",
- },
- "owner": schema.StringAttribute{
- Computed: true,
- Description: "The owner of the database.",
- MarkdownDescription: "The owner of the database.",
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- },
- }
-}
-
-type DatabaseModel struct {
- DatabaseId types.Int64 `tfsdk:"database_id"`
- Id types.Int64 `tfsdk:"tf_original_api_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- Owner types.String `tfsdk:"owner"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go
index b8bc6010..7e3e1eec 100644
--- a/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/database/datasources_gen/databases_data_source_gen.go
@@ -23,6 +23,11 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
"databases": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
+ "created": schema.StringAttribute{
+ Computed: true,
+ Description: "The date when the database was created in RFC3339 format.",
+ MarkdownDescription: "The date when the database was created in RFC3339 format.",
+ },
"id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
@@ -121,6 +126,8 @@ func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
"database_name.asc",
"database_owner.desc",
"database_owner.asc",
+ "index.asc",
+ "index.desc",
),
},
},
@@ -164,6 +171,24 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV
attributes := in.Attributes()
+ createdAttribute, ok := attributes["created"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `created is missing from object`)
+
+ return nil, diags
+ }
+
+ createdVal, ok := createdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute))
+ }
+
idAttribute, ok := attributes["id"]
if !ok {
@@ -223,10 +248,11 @@ func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectV
}
return DatabasesValue{
- Id: idVal,
- Name: nameVal,
- Owner: ownerVal,
- state: attr.ValueStateKnown,
+ Created: createdVal,
+ Id: idVal,
+ Name: nameVal,
+ Owner: ownerVal,
+ state: attr.ValueStateKnown,
}, diags
}
@@ -293,6 +319,24 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin
return NewDatabasesValueUnknown(), diags
}
+ createdAttribute, ok := attributes["created"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `created is missing from object`)
+
+ return NewDatabasesValueUnknown(), diags
+ }
+
+ createdVal, ok := createdAttribute.(basetypes.StringValue)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute))
+ }
+
idAttribute, ok := attributes["id"]
if !ok {
@@ -352,10 +396,11 @@ func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[strin
}
return DatabasesValue{
- Id: idVal,
- Name: nameVal,
- Owner: ownerVal,
- state: attr.ValueStateKnown,
+ Created: createdVal,
+ Id: idVal,
+ Name: nameVal,
+ Owner: ownerVal,
+ state: attr.ValueStateKnown,
}, diags
}
@@ -427,18 +472,20 @@ func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = DatabasesValue{}
type DatabasesValue struct {
- Id basetypes.Int64Value `tfsdk:"id"`
- Name basetypes.StringValue `tfsdk:"name"`
- Owner basetypes.StringValue `tfsdk:"owner"`
- state attr.ValueState
+ Created basetypes.StringValue `tfsdk:"created"`
+ Id basetypes.Int64Value `tfsdk:"id"`
+ Name basetypes.StringValue `tfsdk:"name"`
+ Owner basetypes.StringValue `tfsdk:"owner"`
+ state attr.ValueState
}
func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 3)
+ attrTypes := make(map[string]tftypes.Type, 4)
var val tftypes.Value
var err error
+ attrTypes["created"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
@@ -447,7 +494,15 @@ func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, er
switch v.state {
case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 3)
+ vals := make(map[string]tftypes.Value, 4)
+
+ val, err = v.Created.ToTerraformValue(ctx)
+
+ if err != nil {
+ return tftypes.NewValue(objectType, tftypes.UnknownValue), err
+ }
+
+ vals["created"] = val
val, err = v.Id.ToTerraformValue(ctx)
@@ -503,9 +558,10 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "id": basetypes.Int64Type{},
- "name": basetypes.StringType{},
- "owner": basetypes.StringType{},
+ "created": basetypes.StringType{},
+ "id": basetypes.Int64Type{},
+ "name": basetypes.StringType{},
+ "owner": basetypes.StringType{},
}
if v.IsNull() {
@@ -519,9 +575,10 @@ func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValu
objVal, diags := types.ObjectValue(
attributeTypes,
map[string]attr.Value{
- "id": v.Id,
- "name": v.Name,
- "owner": v.Owner,
+ "created": v.Created,
+ "id": v.Id,
+ "name": v.Name,
+ "owner": v.Owner,
})
return objVal, diags
@@ -542,6 +599,10 @@ func (v DatabasesValue) Equal(o attr.Value) bool {
return true
}
+ if !v.Created.Equal(other.Created) {
+ return false
+ }
+
if !v.Id.Equal(other.Id) {
return false
}
@@ -567,9 +628,10 @@ func (v DatabasesValue) Type(ctx context.Context) attr.Type {
func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "id": basetypes.Int64Type{},
- "name": basetypes.StringType{},
- "owner": basetypes.StringType{},
+ "created": basetypes.StringType{},
+ "id": basetypes.Int64Type{},
+ "name": basetypes.StringType{},
+ "owner": basetypes.StringType{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/database/functions.go b/stackit/internal/services/postgresflexalpha/database/functions.go
index 14589e4f..b1c30bb9 100644
--- a/stackit/internal/services/postgresflexalpha/database/functions.go
+++ b/stackit/internal/services/postgresflexalpha/database/functions.go
@@ -3,9 +3,8 @@ package postgresflexalpha
import (
"context"
"fmt"
- "strings"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
@@ -15,7 +14,7 @@ type databaseClientReader interface {
projectId string,
region string,
instanceId string,
- ) v3alpha1api.ApiListDatabasesRequestRequest
+ ) postgresflex.ApiListDatabasesRequestRequest
}
// getDatabaseById gets a database by its ID.
@@ -24,9 +23,9 @@ func getDatabaseById(
client databaseClientReader,
projectId, region, instanceId string,
databaseId int64,
-) (*v3alpha1api.ListDatabase, error) {
- filter := func(db v3alpha1api.ListDatabase) bool {
- return int64(db.Id) == databaseId
+) (*postgresflex.ListDatabase, error) {
+ filter := func(db postgresflex.ListDatabase) bool {
+ return db.Id != nil && *db.Id == databaseId
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}
@@ -36,9 +35,9 @@ func getDatabaseByName(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId, databaseName string,
-) (*v3alpha1api.ListDatabase, error) {
- filter := func(db v3alpha1api.ListDatabase) bool {
- return db.Name == databaseName
+) (*postgresflex.ListDatabase, error) {
+ filter := func(db postgresflex.ListDatabase) bool {
+ return db.Name != nil && *db.Name == databaseName
}
return getDatabase(ctx, client, projectId, region, instanceId, filter)
}
@@ -49,8 +48,8 @@ func getDatabase(
ctx context.Context,
client databaseClientReader,
projectId, region, instanceId string,
- filter func(db v3alpha1api.ListDatabase) bool,
-) (*v3alpha1api.ListDatabase, error) {
+ filter func(db postgresflex.ListDatabase) bool,
+) (*postgresflex.ListDatabase, error) {
if projectId == "" || region == "" || instanceId == "" {
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
}
@@ -59,18 +58,18 @@ func getDatabase(
for page := int32(1); ; page++ {
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
- Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute()
+ Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_INDEX_ASC).Execute()
if err != nil {
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
}
// If the API returns no databases, we have reached the end of the list.
- if len(res.Databases) == 0 {
+ if res.Databases == nil || len(*res.Databases) == 0 {
break
}
// Iterate over databases to find a match
- for _, db := range res.Databases {
+ for _, db := range *res.Databases {
if filter(db) {
foundDb := db
return &foundDb, nil
@@ -80,8 +79,3 @@ func getDatabase(
return nil, fmt.Errorf("database not found for instance %s", instanceId)
}
-
-// cleanString removes leading and trailing quotes which are sometimes returned by the API.
-func cleanString(s string) string {
- return strings.Trim(s, "\"")
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/functions_test.go b/stackit/internal/services/postgresflexalpha/database/functions_test.go
index 5c11117a..7ec941db 100644
--- a/stackit/internal/services/postgresflexalpha/database/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/database/functions_test.go
@@ -4,100 +4,126 @@ import (
"context"
"testing"
- "github.com/google/go-cmp/cmp"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
+type mockRequest struct {
+ executeFunc func() (*postgresflex.ListDatabasesResponse, error)
+}
+
+func (m *mockRequest) Page(_ int64) postgresflex.ApiListDatabasesRequestRequest { return m }
+func (m *mockRequest) Size(_ int64) postgresflex.ApiListDatabasesRequestRequest { return m }
+func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest {
+ return m
+}
+func (m *mockRequest) Execute() (*postgresflex.ListDatabasesResponse, error) {
+ return m.executeFunc()
+}
+
+type mockDBClient struct {
+ executeRequest func() postgresflex.ApiListDatabasesRequestRequest
+}
+
+var _ databaseClientReader = (*mockDBClient)(nil)
+
+func (m *mockDBClient) ListDatabasesRequest(
+ _ context.Context,
+ _, _, _ string,
+) postgresflex.ApiListDatabasesRequestRequest {
+ return m.executeRequest()
+}
+
func TestGetDatabase(t *testing.T) {
- mockResp := func(page int32) (*v3alpha1api.ListDatabasesResponse, error) {
+ mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
if page == 1 {
- return &v3alpha1api.ListDatabasesResponse{
- Databases: []v3alpha1api.ListDatabase{
- {Id: int32(1), Name: "first"},
- {Id: int32(2), Name: "second"},
+ return &postgresflex.ListDatabasesResponse{
+ Databases: &[]postgresflex.ListDatabase{
+ {Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
+ {Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
},
- Pagination: v3alpha1api.Pagination{
- Page: int32(1),
- TotalPages: int32(2),
- Size: int32(3),
+ Pagination: &postgresflex.Pagination{
+ Page: utils.Ptr(int64(1)),
+ TotalPages: utils.Ptr(int64(2)),
+ Size: utils.Ptr(int64(3)),
},
}, nil
}
if page == 2 {
- return &v3alpha1api.ListDatabasesResponse{
- Databases: []v3alpha1api.ListDatabase{{Id: int32(3), Name: "three"}},
- Pagination: v3alpha1api.Pagination{
- Page: int32(2),
- TotalPages: int32(2),
- Size: int32(3),
+ return &postgresflex.ListDatabasesResponse{
+ Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
+ Pagination: &postgresflex.Pagination{
+ Page: utils.Ptr(int64(2)),
+ TotalPages: utils.Ptr(int64(2)),
+ Size: utils.Ptr(int64(3)),
},
}, nil
}
- return &v3alpha1api.ListDatabasesResponse{
- Databases: []v3alpha1api.ListDatabase{},
- Pagination: v3alpha1api.Pagination{
- Page: int32(3),
- TotalPages: int32(2),
- Size: int32(3),
+ return &postgresflex.ListDatabasesResponse{
+ Databases: &[]postgresflex.ListDatabase{},
+ Pagination: &postgresflex.Pagination{
+ Page: utils.Ptr(int64(3)),
+ TotalPages: utils.Ptr(int64(2)),
+ Size: utils.Ptr(int64(3)),
},
}, nil
}
tests := []struct {
description string
- projectID string
+ projectId string
region string
- instanceID string
+ instanceId string
wantErr bool
wantDbName string
- wantDbID int32
+ wantDbId int64
}{
{
description: "Success - Found by name on first page",
- projectID: "pid", region: "reg", instanceID: "inst",
+ projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false,
wantDbName: "second",
},
{
description: "Success - Found by id on first page",
- projectID: "pid", region: "reg", instanceID: "inst",
+ projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false,
- wantDbID: 2,
+ wantDbId: 2,
},
{
description: "Success - Found by name on second page",
- projectID: "pid", region: "reg", instanceID: "inst",
+ projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false,
wantDbName: "three",
},
{
description: "Success - Found by id on second page",
- projectID: "pid", region: "reg", instanceID: "inst",
+ projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false,
- wantDbID: 1,
+ wantDbId: 1,
},
{
description: "Error - API failure",
- projectID: "pid", region: "reg", instanceID: "inst",
+ projectId: "pid", region: "reg", instanceId: "inst",
wantErr: true,
},
{
description: "Error - Missing parameters",
- projectID: "", region: "reg", instanceID: "inst",
+ projectId: "", region: "reg", instanceId: "inst",
wantErr: true,
},
{
description: "Error - Search by name not found after all pages",
- projectID: "pid", region: "reg", instanceID: "inst",
+ projectId: "pid", region: "reg", instanceId: "inst",
wantDbName: "non-existent",
wantErr: true,
},
{
description: "Error - Search by id not found after all pages",
- projectID: "pid", region: "reg", instanceID: "inst",
- wantDbID: 999999,
+ projectId: "pid", region: "reg", instanceId: "inst",
+ wantDbId: 999999,
wantErr: true,
},
}
@@ -105,95 +131,66 @@ func TestGetDatabase(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- var currentPage int32
-
- mockCall := func(_ v3alpha1api.ApiListDatabasesRequestRequest) (*v3alpha1api.ListDatabasesResponse, error) {
- currentPage++
- return mockResp(currentPage)
+ var currentPage int64
+ client := &mockDBClient{
+ executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
+ return &mockRequest{
+ executeFunc: func() (*postgresflex.ListDatabasesResponse, error) {
+ currentPage++
+ return mockResp(currentPage)
+ },
+ }
+ },
}
- client := &v3alpha1api.DefaultAPIServiceMock{
- ListDatabasesRequestExecuteMock: &mockCall,
- }
-
- var actual *v3alpha1api.ListDatabase
+ var actual *postgresflex.ListDatabase
var errDB error
if tt.wantDbName != "" {
actual, errDB = getDatabaseByName(
t.Context(),
client,
- tt.projectID,
+ tt.projectId,
tt.region,
- tt.instanceID,
+ tt.instanceId,
tt.wantDbName,
)
- } else if tt.wantDbID != 0 {
+ } else if tt.wantDbId != 0 {
actual, errDB = getDatabaseById(
t.Context(),
client,
- tt.projectID,
+ tt.projectId,
tt.region,
- tt.instanceID,
- int64(tt.wantDbID),
+ tt.instanceId,
+ tt.wantDbId,
)
} else {
actual, errDB = getDatabase(
context.Background(),
client,
- tt.projectID,
+ tt.projectId,
tt.region,
- tt.instanceID,
- func(_ v3alpha1api.ListDatabase) bool { return false },
+ tt.instanceId,
+ func(_ postgresflex.ListDatabase) bool { return false },
)
}
if (errDB != nil) != tt.wantErr {
- t.Errorf("getDatabaseByNameOrID() error = %v, wantErr %v", errDB, tt.wantErr)
+ t.Errorf("getDatabase() error = %v, wantErr %v", errDB, tt.wantErr)
return
}
if !tt.wantErr && tt.wantDbName != "" && actual != nil {
- if actual.Name != tt.wantDbName {
- t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", actual.Name, tt.wantDbName)
+ if *actual.Name != tt.wantDbName {
+ t.Errorf("getDatabase() got name = %v, want %v", *actual.Name, tt.wantDbName)
}
}
- if !tt.wantErr && tt.wantDbID != 0 && actual != nil {
- if actual.Id != tt.wantDbID {
- t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", actual.Id, tt.wantDbID)
+ if !tt.wantErr && tt.wantDbId != 0 && actual != nil {
+ if *actual.Id != tt.wantDbId {
+ t.Errorf("getDatabase() got id = %v, want %v", *actual.Id, tt.wantDbId)
}
}
},
)
}
}
-
-func TestCleanString(t *testing.T) {
- testcases := []struct {
- name string
- given string
- expected string
- }{
- {
- name: "should remove quotes",
- given: "\"quoted\"",
- expected: "quoted",
- },
- {
- name: "should not change unquoted string",
- given: "unquoted",
- expected: "unquoted",
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- actual := cleanString(tc.given)
- if diff := cmp.Diff(tc.expected, actual); diff != "" {
- t.Errorf("string mismatch (-want +got):\n%s", diff)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/mapper.go b/stackit/internal/services/postgresflexalpha/database/mapper.go
deleted file mode 100644
index 6ce2200c..00000000
--- a/stackit/internal/services/postgresflexalpha/database/mapper.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package postgresflexalpha
-
-import (
- "fmt"
- "strconv"
-
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
-func mapFields(
- source *v3alpha1api.ListDatabase,
- model *dataSourceModel,
- region string,
-) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model given is nil")
- }
-
- var databaseId int64
- if model.DatabaseId.ValueInt64() != 0 {
- databaseId = model.DatabaseId.ValueInt64()
- } else if source.Id != 0 {
- databaseId = int64(source.Id)
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseId = types.Int64Value(databaseId)
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(cleanString(source.Owner))
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
- model.TerraformID = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- strconv.FormatInt(databaseId, 10),
- )
-
- return nil
-}
-
-// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
-func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != 0 {
- databaseId = int64(source.Id)
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseId = types.Int64Value(databaseId)
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(cleanString(source.Owner))
- return nil
-}
-
-// toCreatePayload converts the resource model to an API create payload.
-func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &v3alpha1api.CreateDatabaseRequestPayload{
- Name: model.Name.ValueString(),
- Owner: model.Owner.ValueStringPointer(),
- }, nil
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/mapper_test.go b/stackit/internal/services/postgresflexalpha/database/mapper_test.go
deleted file mode 100644
index 684af672..00000000
--- a/stackit/internal/services/postgresflexalpha/database/mapper_test.go
+++ /dev/null
@@ -1,248 +0,0 @@
-package postgresflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
- datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
-)
-
-func TestMapFields(t *testing.T) {
- type given struct {
- source *postgresflexalpha.ListDatabase
- model *dataSourceModel
- region string
- }
- type expected struct {
- model *dataSourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &postgresflexalpha.ListDatabase{
- Id: int32(1),
- Name: "my-db",
- Owner: "my-owner",
- },
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- },
- region: "eu01",
- },
- expected: expected{
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- Region: types.StringValue("eu01"),
- DatabaseId: types.Int64Value(1),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- },
- TerraformID: types.StringValue("my-project,eu01,my-instance,1"),
- },
- },
- },
- {
- name: "should preserve existing model ID",
- given: given{
- source: &postgresflexalpha.ListDatabase{
- Id: int32(1),
- Name: "my-db",
- },
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- },
- region: "eu01",
- },
- expected: expected{
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Owner: types.StringValue(""),
- DatabaseId: types.Int64Value(1),
- Region: types.StringValue("eu01"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- },
- TerraformID: types.StringValue("my-project,eu01,my-instance,1"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil source ID",
- given: given{
- source: &postgresflexalpha.ListDatabase{Id: 0},
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil model",
- given: given{
- source: &postgresflexalpha.ListDatabase{Id: int32(1)},
- model: nil,
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapResourceFields(t *testing.T) {
- type given struct {
- source *postgresflexalpha.GetDatabaseResponse
- model *resourceModel
- }
- type expected struct {
- model *resourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &postgresflexalpha.GetDatabaseResponse{
- Id: int32(1),
- Name: "my-db",
- Owner: "my-owner",
- },
- model: &resourceModel{},
- },
- expected: expected{
- model: &resourceModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- DatabaseId: types.Int64Value(1),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &resourceModel{},
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapResourceFields(tc.given.source, tc.given.model)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- type given struct {
- model *resourceModel
- }
- type expected struct {
- payload *postgresflexalpha.CreateDatabaseRequestPayload
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should convert model to payload",
- given: given{
- model: &resourceModel{
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- },
- },
- expected: expected{
- payload: &postgresflexalpha.CreateDatabaseRequestPayload{
- Name: "my-db",
- Owner: utils.Ptr("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil model",
- given: given{model: nil},
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- actual, err := toCreatePayload(tc.given.model)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.payload, actual); diff != "" {
- t.Errorf("payload mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml
deleted file mode 100644
index f3f70aeb..00000000
--- a/stackit/internal/services/postgresflexalpha/database/planModifiers.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'database_id'
- modifiers:
- - 'UseStateForUnknown'
- validators:
- - validate.NoSeparator
- - validate.UUID
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'project_id'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
- validators:
- - validate.NoSeparator
- - validate.UUID
-
- - name: 'name'
- validators:
- - validate.NoSeparator
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
diff --git a/stackit/internal/services/postgresflexalpha/database/resource.go b/stackit/internal/services/postgresflexalpha/database/resource.go
index 6db70746..67d1e477 100644
--- a/stackit/internal/services/postgresflexalpha/database/resource.go
+++ b/stackit/internal/services/postgresflexalpha/database/resource.go
@@ -2,70 +2,70 @@ package postgresflexalpha
import (
"context"
- _ "embed"
+ "errors"
"fmt"
"math"
+ "net/http"
+ "regexp"
"strconv"
"strings"
- "time"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
)
+// Ensure the implementation satisfies the expected interfaces.
var (
- // Ensure the implementation satisfies the expected interfaces.
_ resource.Resource = &databaseResource{}
_ resource.ResourceWithConfigure = &databaseResource{}
_ resource.ResourceWithImportState = &databaseResource{}
_ resource.ResourceWithModifyPlan = &databaseResource{}
- _ resource.ResourceWithIdentity = &databaseResource{}
-
- // Error message constants
- extractErrorSummary = "extracting failed"
- extractErrorMessage = "Extracting identity data: %v"
)
+type Model struct {
+ Id types.String `tfsdk:"id"` // needed by TF
+ DatabaseId types.Int64 `tfsdk:"database_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Name types.String `tfsdk:"name"`
+ Owner types.String `tfsdk:"owner"`
+ Region types.String `tfsdk:"region"`
+}
+
// NewDatabaseResource is a helper function to simplify the provider implementation.
func NewDatabaseResource() resource.Resource {
return &databaseResource{}
}
-// resourceModel describes the resource data model.
-type resourceModel = postgresflexalphaResGen.DatabaseModel
-
-// DatabaseResourceIdentityModel describes the resource's identity attributes.
-type DatabaseResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- DatabaseID types.Int64 `tfsdk:"database_id"`
-}
-
// databaseResource is the resource implementation.
type databaseResource struct {
- client *v3alpha1api.APIClient
+ client *postgresflexalpha.APIClient
providerData core.ProviderData
}
-// ModifyPlan adjusts the plan to set the correct region.
+// ModifyPlan implements resource.ResourceWithModifyPlan.
+// Use the modifier to set the effective region in the current plan.
func (r *databaseResource) ModifyPlan(
ctx context.Context,
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
+ var configModel Model
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -75,7 +75,7 @@ func (r *databaseResource) ModifyPlan(
return
}
- var planModel resourceModel
+ var planModel Model
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -117,46 +117,85 @@ func (r *databaseResource) Configure(
tflog.Info(ctx, "Postgres Flex database client configured")
}
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
// Schema defines the schema for the resource.
-func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := postgresflexalphaResGen.DatabaseResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
+func (r *databaseResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "Postgres Flex database resource schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
+ "database_id": "Database ID.",
+ "instance_id": "ID of the Postgres Flex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "name": "Database name.",
+ "owner": "Username of the database owner.",
+ "region": "The resource region. If not defined, the provider region is used.",
}
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-// IdentitySchema defines the schema for the resource's identity attributes.
-func (r *databaseResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- response *resource.IdentitySchemaResponse,
-) {
- response.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true,
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
},
- "region": identityschema.StringAttribute{
- RequiredForImport: true,
+ "database_id": schema.Int64Attribute{
+ Description: descriptions["database_id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.Int64{},
},
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true,
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
},
- "database_id": identityschema.Int64Attribute{
- RequiredForImport: true,
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "name": schema.StringAttribute{
+ Description: descriptions["name"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{},
+ Validators: []validator.String{
+ stringvalidator.RegexMatches(
+ regexp.MustCompile("^[a-z]([a-z0-9]*)?$"),
+ "must start with a letter, must have lower case letters or numbers",
+ ),
+ },
+ },
+ "owner": schema.StringAttribute{
+ Description: descriptions["owner"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{},
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ // must be computed to allow for storing the override value from the provider
+ Computed: true,
+ Description: descriptions["region"],
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
},
},
}
@@ -168,8 +207,7 @@ func (r *databaseResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- const funcErrorSummary = "[database CREATE] error"
- var model resourceModel
+ var model Model
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -181,7 +219,6 @@ func (r *databaseResource) Create(
projectId := model.ProjectId.ValueString()
region := model.Region.ValueString()
instanceId := model.InstanceId.ValueString()
-
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
@@ -192,77 +229,62 @@ func (r *databaseResource) Create(
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- funcErrorSummary,
+ "Error creating database",
fmt.Sprintf("Creating API payload: %v", err),
)
return
}
// Create new database
- databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
+ databaseResp, err := r.client.CreateDatabaseRequest(
ctx,
projectId,
region,
instanceId,
).CreateDatabaseRequestPayload(*payload).Execute()
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, funcErrorSummary, fmt.Sprintf("Calling API: %v", err))
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating database", fmt.Sprintf("Calling API: %v", err))
return
}
- dbID, ok := databaseResp.GetIdOk()
- if !ok {
+ ctx = core.LogResponse(ctx)
+
+ if databaseResp == nil || databaseResp.Id == nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- funcErrorSummary,
- "API didn't return database Id. A database might although have been created",
+ "Error creating database",
+ "API didn't return database Id. A database might have been created",
)
return
}
- databaseId := int64(*dbID)
+ databaseId := *databaseResp.Id
ctx = tflog.SetField(ctx, "database_id", databaseId)
- ctx = core.LogResponse(ctx)
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseID: types.Int64Value(databaseId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
- SetTimeout(15 * time.Minute).
- SetSleepBeforeWait(15 * time.Second).
- WaitWithContext(ctx)
+ database, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- funcErrorSummary,
+ "Error creating database",
fmt.Sprintf("Getting database details after creation: %v", err),
)
return
}
// Map response body to schema
- err = mapResourceFields(database, &model)
+ err = mapFields(database, &model, region)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- funcErrorSummary,
- fmt.Sprintf("map resource fields: %v", err),
+ "Error creating database",
+ fmt.Sprintf("Processing API payload: %v", err),
)
return
}
-
// Set state to fully populated data
- resp.Diagnostics.Append(resp.State.Set(ctx, model)...)
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
@@ -275,7 +297,7 @@ func (r *databaseResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -286,32 +308,28 @@ func (r *databaseResource) Read(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
- region := model.Region.ValueString()
databaseId := model.DatabaseId.ValueInt64()
-
+ region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId)
+ ctx = tflog.SetField(ctx, "region", region)
- databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
- SetTimeout(15 * time.Minute).
- SetSleepBeforeWait(15 * time.Second).
- WaitWithContext(ctx)
+ databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Getting database details after creation: %v", err),
- )
+ oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
+ if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
+ resp.State.RemoveResource(ctx)
+ return
+ }
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
return
}
ctx = core.LogResponse(ctx)
// Map response body to schema
- err = mapResourceFields(databaseResp, &model)
+ err = mapFields(databaseResp, &model, region)
if err != nil {
core.LogAndAddError(
ctx,
@@ -322,18 +340,6 @@ func (r *databaseResource) Read(
return
}
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseID: types.Int64Value(int64(databaseResp.GetId())),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
// Set refreshed state
diags = resp.State.Set(ctx, model)
resp.Diagnostics.Append(diags...)
@@ -349,7 +355,7 @@ func (r *databaseResource) Update(
req resource.UpdateRequest,
resp *resource.UpdateResponse,
) {
- var model resourceModel
+ var model Model
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -360,16 +366,21 @@ func (r *databaseResource) Update(
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
- region := model.Region.ValueString()
- databaseId := model.DatabaseId.ValueInt64()
+ databaseId64 := model.DatabaseId.ValueInt64()
+ if databaseId64 > math.MaxInt32 {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
+ return
+ }
+ databaseId := int32(databaseId64)
+ region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId)
+ ctx = tflog.SetField(ctx, "region", region)
// Retrieve values from state
- var stateModel resourceModel
+ var stateModel Model
diags = req.State.Get(ctx, &stateModel)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -377,7 +388,7 @@ func (r *databaseResource) Update(
}
modified := false
- var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload
+ var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
if stateModel.Name != model.Name {
payload.Name = model.Name.ValueStringPointer()
modified = true
@@ -393,18 +404,13 @@ func (r *databaseResource) Update(
return
}
- if databaseId > math.MaxInt32 {
- core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", "databaseID out of bounds for int32")
- return
- }
- databaseID32 := int32(databaseId) //nolint:gosec // TODO
// Update existing database
- err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest(
+ res, err := r.client.UpdateDatabasePartiallyRequest(
ctx,
projectId,
region,
instanceId,
- databaseID32,
+ databaseId,
).UpdateDatabasePartiallyRequestPayload(payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
@@ -413,43 +419,20 @@ func (r *databaseResource) Update(
ctx = core.LogResponse(ctx)
- databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
- SetTimeout(15 * time.Minute).
- SetSleepBeforeWait(15 * time.Second).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
// Map response body to schema
- err = mapResourceFields(databaseResp, &model)
+ err = mapFieldsUpdatePartially(res, &model, region)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- "Error reading database",
+ "Error updating database",
fmt.Sprintf("Processing API payload: %v", err),
)
return
}
-
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseID: types.Int64Value(databaseId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
// Set state to fully populated data
- resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
@@ -462,44 +445,32 @@ func (r *databaseResource) Delete(
req resource.DeleteRequest,
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
- // Read identity data
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
ctx = core.InitProviderContext(ctx)
- projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData)
- if errExt != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- extractErrorSummary,
- fmt.Sprintf(extractErrorMessage, errExt),
- )
- }
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
+ databaseId64 := model.DatabaseId.ValueInt64()
if databaseId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return
}
- databaseId := int32(databaseId64) // nolint:gosec // check is performed above
+ databaseId := int32(databaseId64)
+ region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId)
+ ctx = tflog.SetField(ctx, "region", region)
// Delete existing record set
- err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute()
+ err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
}
@@ -510,116 +481,95 @@ func (r *databaseResource) Delete(
}
// ImportState imports a resource into the Terraform state on success.
-// The expected import identifier format is: [project_id],[region],[instance_id],[database_id]
+// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
func (r *databaseResource) ImportState(
ctx context.Context,
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q",
- req.ID,
- ),
- )
- return
- }
-
- databaseId, err := strconv.ParseInt(idParts[3], 10, 64)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...)
-
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "Postgresflex database imported with empty password",
- "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
+ idParts := strings.Split(req.ID, core.Separator)
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q",
+ req.ID,
+ ),
)
-
- tflog.Info(ctx, "Postgres Flex database state imported")
-
return
}
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- databaseId := identityData.DatabaseID.ValueInt64()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...)
-
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), idParts[3])...)
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "Postgresflex database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
+ )
tflog.Info(ctx, "Postgres Flex database state imported")
}
-// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
-func (r *databaseResource) extractIdentityData(
- model resourceModel,
- identity DatabaseResourceIdentityModel,
-) (projectId, region, instanceId string, databaseId int64, err error) {
- if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
+func mapFields(resp *postgresflexalpha.ListDatabase, model *Model, region string) error {
+ if resp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if resp.Id == nil || *resp.Id == 0 {
+ return fmt.Errorf("id not present")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+
+ var databaseId int64
+ if model.DatabaseId.ValueInt64() != 0 {
databaseId = model.DatabaseId.ValueInt64()
+ } else if resp.Id != nil {
+ databaseId = *resp.Id
} else {
- if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
- return "", "", "", 0, fmt.Errorf("database_id not found in config")
- }
- databaseId = identity.DatabaseID.ValueInt64()
+ return fmt.Errorf("database id not present")
}
-
- if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
- projectId = model.ProjectId.ValueString()
- } else {
- if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
- return "", "", "", 0, fmt.Errorf("project_id not found in config")
- }
- projectId = identity.ProjectID.ValueString()
- }
-
- if !model.Region.IsNull() && !model.Region.IsUnknown() {
- region = r.providerData.GetRegionWithOverride(model.Region)
- } else {
- if identity.Region.IsNull() || identity.Region.IsUnknown() {
- return "", "", "", 0, fmt.Errorf("region not found in config")
- }
- region = r.providerData.GetRegionWithOverride(identity.Region)
- }
-
- if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
- instanceId = model.InstanceId.ValueString()
- } else {
- if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
- return "", "", "", 0, fmt.Errorf("instance_id not found in config")
- }
- instanceId = identity.InstanceID.ValueString()
- }
- return projectId, region, instanceId, databaseId, nil
+ model.Id = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(databaseId, 10),
+ )
+ model.DatabaseId = types.Int64Value(databaseId)
+ model.Name = types.StringPointerValue(resp.Name)
+ model.Region = types.StringValue(region)
+ model.Owner = types.StringPointerValue(cleanString(resp.Owner))
+ return nil
}
+
+func mapFieldsUpdatePartially(
+ res *postgresflexalpha.UpdateDatabasePartiallyResponse,
+ model *Model,
+ region string,
+) error {
+ if res == nil {
+ return fmt.Errorf("response is nil")
+ }
+ return mapFields(res.Database, model, region)
+}
+
+func cleanString(s *string) *string {
+ if s == nil {
+ return nil
+ }
+ res := strings.Trim(*s, "\"")
+ return &res
+}
+
+func toCreatePayload(model *Model) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+
+ return &postgresflexalpha.CreateDatabaseRequestPayload{
+ Name: model.Name.ValueStringPointer(),
+ Owner: model.Owner.ValueStringPointer(),
+ }, nil
+}
+
+var errDatabaseNotFound = errors.New("database not found")
diff --git a/stackit/internal/services/postgresflexalpha/database/resource_test.go b/stackit/internal/services/postgresflexalpha/database/resource_test.go
new file mode 100644
index 00000000..15bced10
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/database/resource_test.go
@@ -0,0 +1,232 @@
+package postgresflexalpha
+
+import (
+ "reflect"
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+)
+
+func TestMapFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflex.ListDatabase
+ region string
+ expected Model
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflex.ListDatabase{
+ Id: utils.Ptr(int64(1)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ DatabaseId: types.Int64Value(int64(1)),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringNull(),
+ Owner: types.StringNull(),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflex.ListDatabase{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr("dbname"),
+ Owner: utils.Ptr("username"),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ DatabaseId: types.Int64Value(int64(1)),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue("dbname"),
+ Owner: types.StringValue("username"),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflex.ListDatabase{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr(""),
+ Owner: utils.Ptr(""),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ DatabaseId: types.Int64Value(int64(1)),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue(""),
+ Owner: types.StringValue(""),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "empty_response",
+ &postgresflex.ListDatabase{},
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflex.ListDatabase{
+ Id: utils.Ptr(int64(0)),
+ Name: utils.Ptr("dbname"),
+ Owner: utils.Ptr("username"),
+ },
+ testRegion,
+ Model{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &Model{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ }
+ err := mapFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToCreatePayload(t *testing.T) {
+ tests := []struct {
+ description string
+ input *Model
+ expected *postgresflex.CreateDatabaseRequestPayload
+ isValid bool
+ }{
+ {
+ "default_values",
+ &Model{
+ Name: types.StringValue("dbname"),
+ Owner: types.StringValue("username"),
+ },
+ &postgresflex.CreateDatabaseRequestPayload{
+ Name: utils.Ptr("dbname"),
+ Owner: utils.Ptr("username"),
+ },
+ true,
+ },
+ {
+ "null_fields",
+ &Model{
+ Name: types.StringNull(),
+ Owner: types.StringNull(),
+ },
+ &postgresflex.CreateDatabaseRequestPayload{
+ Name: nil,
+ Owner: nil,
+ },
+ true,
+ },
+ {
+ "nil_model",
+ nil,
+ nil,
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ output, err := toCreatePayload(tt.input)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(output, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func Test_cleanString(t *testing.T) {
+ type args struct {
+ s *string
+ }
+ tests := []struct {
+ name string
+ args args
+ want *string
+ }{
+ {
+ name: "simple_value",
+ args: args{
+ s: utils.Ptr("mytest"),
+ },
+ want: utils.Ptr("mytest"),
+ },
+ {
+ name: "simple_value_with_quotes",
+ args: args{
+ s: utils.Ptr("\"mytest\""),
+ },
+ want: utils.Ptr("mytest"),
+ },
+ {
+ name: "simple_values_with_quotes",
+ args: args{
+ s: utils.Ptr("\"my test here\""),
+ },
+ want: utils.Ptr("my test here"),
+ },
+ {
+ name: "simple_values",
+ args: args{
+ s: utils.Ptr("my test here"),
+ },
+ want: utils.Ptr("my test here"),
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := cleanString(tt.args.s); !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("cleanString() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
index 6affc956..95f6b6e5 100644
--- a/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/database/resources_gen/database_resource_gen.go
@@ -4,8 +4,6 @@ package postgresflexalpha
import (
"context"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
@@ -14,23 +12,11 @@ import (
func DatabaseResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "database_id": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "The ID of the database.",
- MarkdownDescription: "The ID of the database.",
- },
"id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
},
- "instance_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
"name": schema.StringAttribute{
Required: true,
Description: "The name of the database.",
@@ -42,33 +28,12 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
Description: "The owner of the database.",
MarkdownDescription: "The owner of the database.",
},
- "project_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
},
}
}
type DatabaseModel struct {
- DatabaseId types.Int64 `tfsdk:"database_id"`
- Id types.Int64 `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- Owner types.String `tfsdk:"owner"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
+ Id types.Int64 `tfsdk:"id"`
+ Name types.String `tfsdk:"name"`
+ Owner types.String `tfsdk:"owner"`
}
diff --git a/stackit/internal/services/postgresflexalpha/flavor/datasource.go b/stackit/internal/services/postgresflexalpha/flavor/datasource.go
index 455baf14..dc660dd3 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/datasource.go
@@ -1,4 +1,4 @@
-package postgresflexalphaflavor
+package postgresFlexAlphaFlavor
import (
"context"
@@ -8,8 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@@ -17,7 +16,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
@@ -30,13 +28,13 @@ type FlavorModel struct {
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
StorageClass types.String `tfsdk:"storage_class"`
- Cpu types.Int32 `tfsdk:"cpu"`
+ Cpu types.Int64 `tfsdk:"cpu"`
Description types.String `tfsdk:"description"`
Id types.String `tfsdk:"id"`
FlavorId types.String `tfsdk:"flavor_id"`
- MaxGb types.Int32 `tfsdk:"max_gb"`
- Memory types.Int32 `tfsdk:"ram"`
- MinGb types.Int32 `tfsdk:"min_gb"`
+ MaxGb types.Int64 `tfsdk:"max_gb"`
+ Memory types.Int64 `tfsdk:"ram"`
+ MinGb types.Int64 `tfsdk:"min_gb"`
NodeType types.String `tfsdk:"node_type"`
StorageClasses types.List `tfsdk:"storage_classes"`
}
@@ -48,7 +46,7 @@ func NewFlavorDataSource() datasource.DataSource {
// flavorDataSource is the data source implementation.
type flavorDataSource struct {
- client *v3alpha1api.APIClient
+ client *postgresflexalpha.APIClient
providerData core.ProviderData
}
@@ -86,12 +84,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
- "cpu": schema.Int32Attribute{
+ "cpu": schema.Int64Attribute{
Required: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
},
- "ram": schema.Int32Attribute{
+ "ram": schema.Int64Attribute{
Required: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
@@ -116,12 +114,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor id of the instance flavor.",
MarkdownDescription: "The flavor id of the instance flavor.",
},
- "max_gb": schema.Int32Attribute{
+ "max_gb": schema.Int64Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
- "min_gb": schema.Int32Attribute{
+ "min_gb": schema.Int64Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@@ -138,10 +136,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
"class": schema.StringAttribute{
Computed: true,
},
- "max_io_per_sec": schema.Int32Attribute{
+ "max_io_per_sec": schema.Int64Attribute{
Computed: true,
},
- "max_through_in_mb": schema.Int32Attribute{
+ "max_through_in_mb": schema.Int64Attribute{
Computed: true,
},
},
@@ -171,25 +169,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
- flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
+ flavors, err := getAllFlavors(ctx, r.client, projectId, region)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return
}
- var foundFlavors []v3alpha1api.ListFlavors
+ var foundFlavors []postgresflexalpha.ListFlavors
for _, flavor := range flavors {
- if model.Cpu.ValueInt32() != flavor.Cpu {
+ if model.Cpu.ValueInt64() != *flavor.Cpu {
continue
}
- if model.Memory.ValueInt32() != flavor.Memory {
+ if model.Memory.ValueInt64() != *flavor.Memory {
continue
}
- if model.NodeType.ValueString() != flavor.NodeType {
+ if model.NodeType.ValueString() != *flavor.NodeType {
continue
}
- for _, sc := range flavor.StorageClasses {
- if model.StorageClass.ValueString() != sc.Class {
+ for _, sc := range *flavor.StorageClasses {
+ if model.StorageClass.ValueString() != *sc.Class {
continue
}
foundFlavors = append(foundFlavors, flavor)
@@ -205,11 +203,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}
f := foundFlavors[0]
- model.Description = types.StringValue(f.Description)
- model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
- model.FlavorId = types.StringValue(f.Id)
- model.MaxGb = types.Int32Value(f.MaxGB)
- model.MinGb = types.Int32Value(f.MinGB)
+ model.Description = types.StringValue(*f.Description)
+ model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
+ model.FlavorId = types.StringValue(*f.Id)
+ model.MaxGb = types.Int64Value(*f.MaxGB)
+ model.MinGb = types.Int64Value(*f.MinGB)
if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
@@ -219,15 +217,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
})
} else {
var scList []attr.Value
- for _, sc := range f.StorageClasses {
+ for _, sc := range *f.StorageClasses {
scList = append(
scList,
postgresflexalphaGen.NewStorageClassesValueMust(
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{
- "class": types.StringValue(sc.Class),
- "max_io_per_sec": types.Int32Value(sc.MaxIoPerSec),
- "max_through_in_mb": types.Int32Value(sc.MaxThroughInMb),
+ "class": types.StringValue(*sc.Class),
+ "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
+ "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
},
),
)
diff --git a/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go
index 19be2c9e..924d1375 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/datasources_gen/flavors_data_source_gen.go
@@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "cpu": schema.Int32Attribute{
+ "cpu": schema.Int64Attribute{
Computed: true,
Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.",
@@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "max_gb": schema.Int32Attribute{
+ "max_gb": schema.Int64Attribute{
Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
- "memory": schema.Int32Attribute{
+ "memory": schema.Int64Attribute{
Computed: true,
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
- "min_gb": schema.Int32Attribute{
+ "min_gb": schema.Int64Attribute{
Computed: true,
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{
Computed: true,
},
- "max_io_per_sec": schema.Int32Attribute{
+ "max_io_per_sec": schema.Int64Attribute{
Computed: true,
},
- "max_through_in_mb": schema.Int32Attribute{
+ "max_through_in_mb": schema.Int64Attribute{
Computed: true,
},
},
@@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.",
},
- "page": schema.Int32Attribute{
+ "page": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of the page of items list to be returned.",
@@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
},
"pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "page": schema.Int32Attribute{
+ "page": schema.Int64Attribute{
Computed: true,
},
- "size": schema.Int32Attribute{
+ "size": schema.Int64Attribute{
Computed: true,
},
"sort": schema.StringAttribute{
Computed: true,
},
- "total_pages": schema.Int32Attribute{
+ "total_pages": schema.Int64Attribute{
Computed: true,
},
- "total_rows": schema.Int32Attribute{
+ "total_rows": schema.Int64Attribute{
Computed: true,
},
},
@@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "size": schema.Int32Attribute{
+ "size": schema.Int64Attribute{
Optional: true,
Computed: true,
Description: "Number of items to be returned on each page.",
@@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"`
- Page types.Int32 `tfsdk:"page"`
+ Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Size types.Int32 `tfsdk:"size"`
+ Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"`
}
@@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags
}
- minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
+ cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
+ fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
}
descriptionAttribute, ok := attributes["description"]
@@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
+ maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
+ fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
}
memoryAttribute, ok := attributes["memory"]
@@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
+ memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
+ fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
}
minGbAttribute, ok := attributes["min_gb"]
@@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags
}
- minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
+ minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
+ fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
}
nodeTypeAttribute, ok := attributes["node_type"]
@@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct {
- Cpu basetypes.Int32Value `tfsdk:"cpu"`
+ Cpu basetypes.Int64Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
- Memory basetypes.Int32Value `tfsdk:"memory"`
- MinGb basetypes.Int32Value `tfsdk:"min_gb"`
+ MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
+ Memory basetypes.Int64Value `tfsdk:"memory"`
+ MinGb basetypes.Int64Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState
@@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value
var err error
- attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
}
attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int32Type{},
+ "cpu": basetypes.Int64Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
- "max_gb": basetypes.Int32Type{},
- "memory": basetypes.Int32Type{},
- "min_gb": basetypes.Int32Type{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "cpu": basetypes.Int32Type{},
+ "cpu": basetypes.Int64Type{},
"description": basetypes.StringType{},
"id": basetypes.StringType{},
- "max_gb": basetypes.Int32Type{},
- "memory": basetypes.Int32Type{},
- "min_gb": basetypes.Int32Type{},
+ "max_gb": basetypes.Int64Type{},
+ "memory": basetypes.Int64Type{},
+ "min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx),
@@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags
}
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
+ maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
+ fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
}
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags
}
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
+ maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
+ fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
}
if diags.HasError() {
@@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
+ MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
+ MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
state attr.ValueState
}
@@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int32Type{},
- "max_through_in_mb": basetypes.Int32Type{},
+ "max_io_per_sec": basetypes.Int64Type{},
+ "max_through_in_mb": basetypes.Int64Type{},
}
if v.IsNull() {
@@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int32Type{},
- "max_through_in_mb": basetypes.Int32Type{},
+ "max_io_per_sec": basetypes.Int64Type{},
+ "max_through_in_mb": basetypes.Int64Type{},
}
}
@@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- pageVal, ok := pageAttribute.(basetypes.Int32Value)
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- pageVal, ok := pageAttribute.(basetypes.Int32Value)
+ pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
+ fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
}
sizeAttribute, ok := attributes["size"]
@@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
+ sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
+ fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
}
sortAttribute, ok := attributes["sort"]
@@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
+ totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
+ fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
}
totalRowsAttribute, ok := attributes["total_rows"]
@@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags
}
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
+ totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
+ fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
}
if diags.HasError() {
@@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct {
- Page basetypes.Int32Value `tfsdk:"page"`
- Size basetypes.Int32Value `tfsdk:"size"`
+ Page basetypes.Int64Value `tfsdk:"page"`
+ Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
+ TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
+ TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState
}
@@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value
var err error
- attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
+ attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
+ attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
@@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
- "page": basetypes.Int32Type{},
- "size": basetypes.Int32Type{},
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int32Type{},
- "total_rows": basetypes.Int32Type{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
}
if v.IsNull() {
@@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
- "page": basetypes.Int32Type{},
- "size": basetypes.Int32Type{},
+ "page": basetypes.Int64Type{},
+ "size": basetypes.Int64Type{},
"sort": basetypes.StringType{},
- "total_pages": basetypes.Int32Type{},
- "total_rows": basetypes.Int32Type{},
+ "total_pages": basetypes.Int64Type{},
+ "total_rows": basetypes.Int64Type{},
}
}
diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions.go b/stackit/internal/services/postgresflexalpha/flavor/functions.go
index 97788dc8..5a631bc7 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/functions.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/functions.go
@@ -1,24 +1,24 @@
-package postgresflexalphaflavor
+package postgresFlexAlphaFlavor
import (
"context"
"fmt"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
type flavorsClientReader interface {
GetFlavorsRequest(
ctx context.Context,
projectId, region string,
- ) v3alpha1api.ApiGetFlavorsRequestRequest
+ ) postgresflex.ApiGetFlavorsRequestRequest
}
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
- []v3alpha1api.ListFlavors,
+ []postgresflex.ListFlavors,
error,
) {
- getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
+ getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
if err != nil {
return nil, err
@@ -32,29 +32,29 @@ func getFlavorsByFilter(
ctx context.Context,
client flavorsClientReader,
projectId, region string,
- filter func(db v3alpha1api.ListFlavors) bool,
-) ([]v3alpha1api.ListFlavors, error) {
+ filter func(db postgresflex.ListFlavors) bool,
+) ([]postgresflex.ListFlavors, error) {
if projectId == "" || region == "" {
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
}
const pageSize = 25
- var result = make([]v3alpha1api.ListFlavors, 0)
+ var result = make([]postgresflex.ListFlavors, 0)
for page := int32(1); ; page++ {
res, err := client.GetFlavorsRequest(ctx, projectId, region).
- Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
+ Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_INDEX_ASC).Execute()
if err != nil {
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
}
// If the API returns no flavors, we have reached the end of the list.
- if len(res.Flavors) == 0 {
+ if res.Flavors == nil || len(*res.Flavors) == 0 {
break
}
- for _, flavor := range res.Flavors {
+ for _, flavor := range *res.Flavors {
if filter(flavor) {
result = append(result, flavor)
}
diff --git a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
index 164f40a7..db8fa3bf 100644
--- a/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/flavor/functions_test.go
@@ -1,19 +1,19 @@
-package postgresflexalphaflavor
+package postgresFlexAlphaFlavor
-/*
import (
"context"
"testing"
- postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
type mockRequest struct {
executeFunc func() (*postgresflex.GetFlavorsResponse, error)
}
-func (m *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
-func (m *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Page(_ int64) postgresflex.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Size(_ int64) postgresflex.ApiGetFlavorsRequestRequest { return m }
func (m *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest {
return m
}
@@ -29,25 +29,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
return m.executeRequest()
}
-var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
+var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
if page == 1 {
return &postgresflex.GetFlavorsResponse{
- Flavors: []postgresflex.ListFlavors{
- {Id: "flavor-1", Description: "first"},
- {Id: "flavor-2", Description: "second"},
+ Flavors: &[]postgresflex.ListFlavors{
+ {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
+ {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
},
}, nil
}
if page == 2 {
return &postgresflex.GetFlavorsResponse{
- Flavors: []postgresflex.ListFlavors{
- {Id: "flavor-3", Description: "three"},
+ Flavors: &[]postgresflex.ListFlavors{
+ {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
},
}, nil
}
return &postgresflex.GetFlavorsResponse{
- Flavors: []postgresflex.ListFlavors{},
+ Flavors: &[]postgresflex.ListFlavors{},
}, nil
}
@@ -71,7 +71,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
{
description: "Success - Filter flavors by description",
projectId: "pid", region: "reg",
- filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
+ filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
wantCount: 1,
wantErr: false,
},
@@ -85,10 +85,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.description, func(t *testing.T) {
- var currentPage int32
+ var currentPage int64
client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
- return mockRequest{
+ return &mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++
return mockResp(currentPage)
@@ -112,10 +112,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
}
func TestGetAllFlavors(t *testing.T) {
- var currentPage int32
+ var currentPage int64
client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
- return mockRequest{
+ return &mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++
return mockResp(currentPage)
@@ -132,4 +132,3 @@ func TestGetAllFlavors(t *testing.T) {
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
}
}
-*/
diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasource.go b/stackit/internal/services/postgresflexalpha/flavors/datasource.go
index f5c99a82..26be805b 100644
--- a/stackit/internal/services/postgresflexalpha/flavors/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/flavors/datasource.go
@@ -5,8 +5,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
@@ -22,19 +21,12 @@ func NewFlavorsDataSource() datasource.DataSource {
return &flavorsDataSource{}
}
-// dataSourceModel maps the data source schema data.
-type dataSourceModel = postgresflexalphaGen.FlavorsModel
-
type flavorsDataSource struct {
- client *v3alpha1api.APIClient
+ client *postgresflexalpha.APIClient
providerData core.ProviderData
}
-func (d *flavorsDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
+func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_flavors"
}
@@ -43,11 +35,7 @@ func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaReque
}
// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
+func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
var ok bool
d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -63,7 +51,7 @@ func (d *flavorsDataSource) Configure(
}
func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
+ var data postgresflexalphaGen.FlavorsModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
diff --git a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
index e0b76221..924d1375 100644
--- a/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
@@ -33,7 +33,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
- "tf_original_api_id": schema.StringAttribute{
+ "id": schema.StringAttribute{
Computed: true,
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
@@ -151,6 +151,8 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
MarkdownDescription: "Sorting of the flavors to be returned on each page.",
Validators: []validator.String{
stringvalidator.OneOf(
+ "index.desc",
+ "index.asc",
"cpu.desc",
"cpu.asc",
"flavor_description.asc",
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasource.go b/stackit/internal/services/postgresflexalpha/instance/datasource.go
index cd7048e3..de0c5c74 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasource.go
@@ -5,16 +5,13 @@ import (
"fmt"
"net/http"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
@@ -29,33 +26,19 @@ func NewInstanceDataSource() datasource.DataSource {
return &instanceDataSource{}
}
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- postgresflexalpha2.InstanceModel
- TerraformID types.String `tfsdk:"id"`
-}
-
// instanceDataSource is the data source implementation.
type instanceDataSource struct {
- client *v3alpha1api.APIClient
+ client *postgresflexalpha.APIClient
providerData core.ProviderData
}
// Metadata returns the data source type name.
-func (r *instanceDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
+func (r *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_postgresflexalpha_instance"
}
// Configure adds the provider configured client to the data source.
-func (r *instanceDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
+func (r *instanceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
var ok bool
r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
@@ -76,12 +59,8 @@ func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequ
}
// Read refreshes the Terraform state with the latest data.
-func (r *instanceDataSource) Read(
- ctx context.Context,
- req datasource.ReadRequest,
- resp *datasource.ReadResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model dataSourceModel
+func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
+ var model postgresflexalpha2.InstanceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -96,7 +75,7 @@ func (r *instanceDataSource) Read(
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
utils.LogError(
ctx,
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
index 58f88e01..5ff386fe 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instance_data_source_gen.go
@@ -28,32 +28,20 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
},
"backup_schedule": schema.StringAttribute{
Computed: true,
- Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
- MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
+ Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
+ MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
},
"connection_info": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "write": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- Description: "The host of the instance.",
- MarkdownDescription: "The host of the instance.",
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: "The port of the instance.",
- MarkdownDescription: "The port of the instance.",
- },
- },
- CustomType: WriteType{
- ObjectType: types.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- },
- },
+ "host": schema.StringAttribute{
Computed: true,
- Description: "The DNS name and port in the instance overview",
- MarkdownDescription: "The DNS name and port in the instance overview",
+ Description: "The host of the instance.",
+ MarkdownDescription: "The host of the instance.",
+ },
+ "port": schema.Int64Attribute{
+ Computed: true,
+ Description: "The port of the instance.",
+ MarkdownDescription: "The port of the instance.",
},
},
CustomType: ConnectionInfoType{
@@ -62,8 +50,8 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
},
},
Computed: true,
- Description: "The connection information of the instance",
- MarkdownDescription: "The connection information of the instance",
+ Description: "The DNS name and port in the instance overview",
+ MarkdownDescription: "The DNS name and port in the instance overview",
},
"encryption": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
@@ -100,7 +88,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
},
- "tf_original_api_id": schema.StringAttribute{
+ "id": schema.StringAttribute{
Computed: true,
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
@@ -216,7 +204,7 @@ type InstanceModel struct {
ConnectionInfo ConnectionInfoValue `tfsdk:"connection_info"`
Encryption EncryptionValue `tfsdk:"encryption"`
FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"tf_original_api_id"`
+ Id types.String `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
IsDeletable types.Bool `tfsdk:"is_deletable"`
Name types.String `tfsdk:"name"`
@@ -255,22 +243,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
attributes := in.Attributes()
- writeAttribute, ok := attributes["write"]
+ hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
- `write is missing from object`)
+ `host is missing from object`)
return nil, diags
}
- writeVal, ok := writeAttribute.(basetypes.ObjectValue)
+ hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
+ fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
+ }
+
+ portAttribute, ok := attributes["port"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `port is missing from object`)
+
+ return nil, diags
+ }
+
+ portVal, ok := portAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -278,7 +284,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
}
return ConnectionInfoValue{
- Write: writeVal,
+ Host: hostVal,
+ Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
@@ -346,22 +353,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
return NewConnectionInfoValueUnknown(), diags
}
- writeAttribute, ok := attributes["write"]
+ hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
- `write is missing from object`)
+ `host is missing from object`)
return NewConnectionInfoValueUnknown(), diags
}
- writeVal, ok := writeAttribute.(basetypes.ObjectValue)
+ hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
+ fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
+ }
+
+ portAttribute, ok := attributes["port"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `port is missing from object`)
+
+ return NewConnectionInfoValueUnknown(), diags
+ }
+
+ portVal, ok := portAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -369,7 +394,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
}
return ConnectionInfoValue{
- Write: writeVal,
+ Host: hostVal,
+ Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
@@ -442,401 +468,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
type ConnectionInfoValue struct {
- Write basetypes.ObjectValue `tfsdk:"write"`
- state attr.ValueState
-}
-
-func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 1)
-
- var val tftypes.Value
- var err error
-
- attrTypes["write"] = basetypes.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- }.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 1)
-
- val, err = v.Write.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["write"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v ConnectionInfoValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v ConnectionInfoValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v ConnectionInfoValue) String() string {
- return "ConnectionInfoValue"
-}
-
-func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- var write basetypes.ObjectValue
-
- if v.Write.IsNull() {
- write = types.ObjectNull(
- WriteValue{}.AttributeTypes(ctx),
- )
- }
-
- if v.Write.IsUnknown() {
- write = types.ObjectUnknown(
- WriteValue{}.AttributeTypes(ctx),
- )
- }
-
- if !v.Write.IsNull() && !v.Write.IsUnknown() {
- write = types.ObjectValueMust(
- WriteValue{}.AttributeTypes(ctx),
- v.Write.Attributes(),
- )
- }
-
- attributeTypes := map[string]attr.Type{
- "write": basetypes.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- },
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "write": write,
- })
-
- return objVal, diags
-}
-
-func (v ConnectionInfoValue) Equal(o attr.Value) bool {
- other, ok := o.(ConnectionInfoValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Write.Equal(other.Write) {
- return false
- }
-
- return true
-}
-
-func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
- return ConnectionInfoType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "write": basetypes.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- },
- }
-}
-
-var _ basetypes.ObjectTypable = WriteType{}
-
-type WriteType struct {
- basetypes.ObjectType
-}
-
-func (t WriteType) Equal(o attr.Type) bool {
- other, ok := o.(WriteType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t WriteType) String() string {
- return "WriteType"
-}
-
-func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- hostAttribute, ok := attributes["host"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `host is missing from object`)
-
- return nil, diags
- }
-
- hostVal, ok := hostAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
- }
-
- portAttribute, ok := attributes["port"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `port is missing from object`)
-
- return nil, diags
- }
-
- portVal, ok := portAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return WriteValue{
- Host: hostVal,
- Port: portVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewWriteValueNull() WriteValue {
- return WriteValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewWriteValueUnknown() WriteValue {
- return WriteValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing WriteValue Attribute Value",
- "While creating a WriteValue value, a missing attribute value was detected. "+
- "A WriteValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid WriteValue Attribute Type",
- "While creating a WriteValue value, an invalid attribute value was detected. "+
- "A WriteValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra WriteValue Attribute Value",
- "While creating a WriteValue value, an extra attribute value was detected. "+
- "A WriteValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewWriteValueUnknown(), diags
- }
-
- hostAttribute, ok := attributes["host"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `host is missing from object`)
-
- return NewWriteValueUnknown(), diags
- }
-
- hostVal, ok := hostAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
- }
-
- portAttribute, ok := attributes["port"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `port is missing from object`)
-
- return NewWriteValueUnknown(), diags
- }
-
- portVal, ok := portAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
- }
-
- if diags.HasError() {
- return NewWriteValueUnknown(), diags
- }
-
- return WriteValue{
- Host: hostVal,
- Port: portVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
- object, diags := NewWriteValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewWriteValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewWriteValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewWriteValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t WriteType) ValueType(ctx context.Context) attr.Value {
- return WriteValue{}
-}
-
-var _ basetypes.ObjectValuable = WriteValue{}
-
-type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState
}
-func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 2)
var val tftypes.Value
@@ -881,19 +518,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
}
}
-func (v WriteValue) IsNull() bool {
+func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull
}
-func (v WriteValue) IsUnknown() bool {
+func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown
}
-func (v WriteValue) String() string {
- return "WriteValue"
+func (v ConnectionInfoValue) String() string {
+ return "ConnectionInfoValue"
}
-func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
@@ -919,8 +556,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
return objVal, diags
}
-func (v WriteValue) Equal(o attr.Value) bool {
- other, ok := o.(WriteValue)
+func (v ConnectionInfoValue) Equal(o attr.Value) bool {
+ other, ok := o.(ConnectionInfoValue)
if !ok {
return false
@@ -945,15 +582,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
return true
}
-func (v WriteValue) Type(ctx context.Context) attr.Type {
- return WriteType{
+func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
+ return ConnectionInfoType{
basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx),
},
}
}
-func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int64Type{},
diff --git a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go
index 0407c13f..beb620dd 100644
--- a/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/datasources_gen/instances_data_source_gen.go
@@ -113,6 +113,8 @@ func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
MarkdownDescription: "Sorting of the items to be returned on each page.",
Validators: []validator.String{
stringvalidator.OneOf(
+ "index.desc",
+ "index.asc",
"id.desc",
"id.asc",
"is_deletable.desc",
diff --git a/stackit/internal/services/postgresflexalpha/instance/functions.go b/stackit/internal/services/postgresflexalpha/instance/functions.go
index 6e7164b9..ac40f185 100644
--- a/stackit/internal/services/postgresflexalpha/instance/functions.go
+++ b/stackit/internal/services/postgresflexalpha/instance/functions.go
@@ -7,20 +7,35 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
-func mapGetInstanceResponseToModel(
- ctx context.Context,
- m *postgresflexalpharesource.InstanceModel,
- resp *postgresflex.GetInstanceResponse,
-) error {
+func mapGetInstanceResponseToModel(ctx context.Context, m *postgresflexalpharesource.InstanceModel, resp *postgresflex.GetInstanceResponse) error {
+ tflog.Debug(ctx, ">>>> MSH DEBUG <<<<", map[string]interface{}{
+ "id": m.Id.ValueString(),
+ "instance_id": m.InstanceId.ValueString(),
+ "backup_schedule": m.BackupSchedule.ValueString(),
+ "flavor_id": m.FlavorId.ValueString(),
+ "encryption.kek_key_id": m.Encryption.KekKeyId.ValueString(),
+ "encryption.kek_key_ring_id": m.Encryption.KekKeyRingId.ValueString(),
+ "encryption.kek_key_version": m.Encryption.KekKeyVersion.ValueString(),
+ "encryption.service_account": m.Encryption.ServiceAccount.ValueString(),
+ "is_deletable": m.IsDeletable.ValueBool(),
+ "name": m.Name.ValueString(),
+ "status": m.Status.ValueString(),
+ "retention_days": m.RetentionDays.ValueInt64(),
+ "replicas": m.Replicas.ValueInt64(),
+ "network.instance_address": m.Network.InstanceAddress.ValueString(),
+ "network.router_address": m.Network.RouterAddress.ValueString(),
+ "version": m.Version.ValueString(),
+ "network.acl": m.Network.Acl.String(),
+ })
+
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Encryption = postgresflexalpharesource.NewEncryptionValueNull()
if resp.HasEncryption() {
m.Encryption = postgresflexalpharesource.NewEncryptionValueMust(
m.Encryption.AttributeTypes(ctx),
@@ -33,36 +48,21 @@ func mapGetInstanceResponseToModel(
)
}
- isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
+ m.ConnectionInfo.Host = types.StringValue("")
+ if host, ok := resp.ConnectionInfo.GetHostOk(); ok {
+ m.ConnectionInfo.Host = types.StringValue(host)
+ }
- if isConnectionInfoIncomplete {
- m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
- } else {
- m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
- postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- // careful - we can not use NewWriteValueMust here
- "write": basetypes.NewObjectValueMust(
- postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "host": types.StringValue(resp.ConnectionInfo.Write.Host),
- // note: IDE does not show that port is actually an int64 in the Schema
- "port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
- },
- ),
- },
- )
+ m.ConnectionInfo.Port = types.Int64Value(0)
+ if port, ok := resp.ConnectionInfo.GetPortOk(); ok {
+ m.ConnectionInfo.Port = types.Int64Value(port)
}
m.FlavorId = types.StringValue(resp.GetFlavorId())
if m.Id.IsNull() || m.Id.IsUnknown() {
- m.Id = utils.BuildInternalTerraformId(
- m.ProjectId.ValueString(),
- m.Region.ValueString(),
- m.InstanceId.ValueString(),
- )
+ m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
}
- m.InstanceId = types.StringValue(resp.Id)
+ m.InstanceId = types.StringPointerValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
@@ -75,12 +75,12 @@ func mapGetInstanceResponseToModel(
netInstAdd := types.StringValue("")
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
- netInstAdd = types.StringValue(*instAdd)
+ netInstAdd = types.StringValue(instAdd)
}
netRtrAdd := types.StringValue("")
if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok {
- netRtrAdd = types.StringValue(*rtrAdd)
+ netRtrAdd = types.StringValue(rtrAdd)
}
net, diags := postgresflexalpharesource.NewNetworkValue(
@@ -98,7 +98,7 @@ func mapGetInstanceResponseToModel(
m.Network = net
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
+ m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
m.Name = types.StringValue(resp.GetName())
@@ -108,7 +108,7 @@ func mapGetInstanceResponseToModel(
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
- "size": types.Int64Value(int64(resp.Storage.GetSize())),
+ "size": types.Int64Value(resp.Storage.GetSize()),
},
)
if diags.HasError() {
@@ -120,18 +120,14 @@ func mapGetInstanceResponseToModel(
return nil
}
-func mapGetDataInstanceResponseToModel(
- ctx context.Context,
- m *dataSourceModel,
- resp *postgresflex.GetInstanceResponse,
-) error {
+func mapGetDataInstanceResponseToModel(ctx context.Context, m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) error {
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
handleEncryption(m, resp)
- handleConnectionInfo(ctx, m, resp)
-
+ m.ConnectionInfo.Host = types.StringValue(resp.ConnectionInfo.GetHost())
+ m.ConnectionInfo.Port = types.Int64Value(resp.ConnectionInfo.GetPort())
m.FlavorId = types.StringValue(resp.GetFlavorId())
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
- m.InstanceId = types.StringValue(resp.Id)
+ m.InstanceId = types.StringPointerValue(resp.Id)
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
m.Name = types.StringValue(resp.GetName())
@@ -141,13 +137,13 @@ func mapGetDataInstanceResponseToModel(
}
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
+ m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
m.Status = types.StringValue(string(resp.GetStatus()))
storage, diags := postgresflexalphadatasource.NewStorageValue(
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
- "size": types.Int64Value(int64(resp.Storage.GetSize())),
+ "size": types.Int64Value(resp.Storage.GetSize()),
},
)
if diags.HasError() {
@@ -158,48 +154,27 @@ func mapGetDataInstanceResponseToModel(
return nil
}
-func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
- isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
-
- if isConnectionInfoIncomplete {
- m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
- } else {
- m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
- postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "write": types.ObjectValueMust(
- postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "host": types.StringValue(resp.ConnectionInfo.Write.Host),
- "port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
- },
- ),
- },
- )
- }
-}
-
-func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error {
- netACL, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
+func handleNetwork(ctx context.Context, m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) error {
+ netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
if diags.HasError() {
return fmt.Errorf("failed converting network acl from response")
}
instAddr := ""
if iA, ok := resp.Network.GetInstanceAddressOk(); ok {
- instAddr = *iA
+ instAddr = iA
}
rtrAddr := ""
if rA, ok := resp.Network.GetRouterAddressOk(); ok {
- rtrAddr = *rA
+ rtrAddr = rA
}
net, diags := postgresflexalphadatasource.NewNetworkValue(
postgresflexalphadatasource.NetworkValue{}.AttributeTypes(ctx),
map[string]attr.Value{
"access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netACL,
+ "acl": netAcl,
"instance_address": types.StringValue(instAddr),
"router_address": types.StringValue(rtrAddr),
},
@@ -211,25 +186,25 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
return nil
}
-func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
+func handleEncryption(m *postgresflexalphadatasource.InstanceModel, resp *postgresflex.GetInstanceResponse) {
keyId := ""
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- keyId = *keyIdVal
+ keyId = keyIdVal
}
keyRingId := ""
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- keyRingId = *keyRingIdVal
+ keyRingId = keyRingIdVal
}
keyVersion := ""
if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- keyVersion = *keyVersionVal
+ keyVersion = keyVersionVal
}
svcAcc := ""
if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok {
- svcAcc = *svcAccVal
+ svcAcc = svcAccVal
}
m.Encryption = postgresflexalphadatasource.EncryptionValue{
diff --git a/stackit/internal/services/postgresflexalpha/instance/functions_test.go b/stackit/internal/services/postgresflexalpha/instance/functions_test.go
index 0fa85f16..19784ad8 100644
--- a/stackit/internal/services/postgresflexalpha/instance/functions_test.go
+++ b/stackit/internal/services/postgresflexalpha/instance/functions_test.go
@@ -1,191 +1,745 @@
package postgresflexalpha
import (
- "context"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
- postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
- utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
-func Test_handleConnectionInfo(t *testing.T) {
- type args struct {
- ctx context.Context
- m *dataSourceModel
- hostName string
- port int32
- }
- tests := []struct {
- name string
- args args
- }{
- {
- name: "empty connection info",
- args: args{
- ctx: context.TODO(),
- m: &dataSourceModel{},
- hostName: "",
- port: 0,
- },
+//nolint:unused // TODO: remove when used
+type testFlavor struct {
+ Cpu int64
+ Description string
+ Id string
+ MaxGB int64
+ Memory int64
+ MinGB int64
+ NodeType string
+ StorageClasses []testFlavorStorageClass
+}
+
+//nolint:unused // TODO: remove when used
+type testFlavorStorageClass struct {
+ Class string
+ MaxIoPerSec int64
+ MaxThroughInMb int64
+}
+
+//nolint:unused // TODO: remove when used
+var responseList = []testFlavor{
+ {
+ Cpu: 1,
+ Description: "flavor 1.1",
+ Id: "flv1.1",
+ MaxGB: 500,
+ Memory: 1,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
- {
- name: "empty connection info host",
- args: args{
- ctx: context.TODO(),
- m: &dataSourceModel{},
- hostName: "",
- port: 1234,
- },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.2",
+ Id: "flv1.2",
+ MaxGB: 500,
+ Memory: 2,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
- {
- name: "empty connection info port",
- args: args{
- ctx: context.TODO(),
- m: &dataSourceModel{},
- hostName: "hostname",
- port: 0,
- },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.3",
+ Id: "flv1.3",
+ MaxGB: 500,
+ Memory: 3,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
- {
- name: "valid connection info",
- args: args{
- ctx: context.TODO(),
- m: &dataSourceModel{},
- hostName: "host",
- port: 1000,
- },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.4",
+ Id: "flv1.4",
+ MaxGB: 500,
+ Memory: 4,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
},
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.5",
+ Id: "flv1.5",
+ MaxGB: 500,
+ Memory: 5,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.6",
+ Id: "flv1.6",
+ MaxGB: 500,
+ Memory: 6,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.7",
+ Id: "flv1.7",
+ MaxGB: 500,
+ Memory: 7,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.8",
+ Id: "flv1.8",
+ MaxGB: 500,
+ Memory: 8,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.9",
+ Id: "flv1.9",
+ MaxGB: 500,
+ Memory: 9,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ /* ......................................................... */
+ {
+ Cpu: 2,
+ Description: "flavor 2.1",
+ Id: "flv2.1",
+ MaxGB: 500,
+ Memory: 1,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.2",
+ Id: "flv2.2",
+ MaxGB: 500,
+ Memory: 2,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.3",
+ Id: "flv2.3",
+ MaxGB: 500,
+ Memory: 3,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.4",
+ Id: "flv2.4",
+ MaxGB: 500,
+ Memory: 4,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.5",
+ Id: "flv2.5",
+ MaxGB: 500,
+ Memory: 5,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.6",
+ Id: "flv2.6",
+ MaxGB: 500,
+ Memory: 6,
+ MinGB: 5,
+ NodeType: "single",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ /* ......................................................... */
+ {
+ Cpu: 1,
+ Description: "flavor 1.1 replica",
+ Id: "flv1.1r",
+ MaxGB: 500,
+ Memory: 1,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.2 replica",
+ Id: "flv1.2r",
+ MaxGB: 500,
+ Memory: 2,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.3 replica",
+ Id: "flv1.3r",
+ MaxGB: 500,
+ Memory: 3,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.4 replica",
+ Id: "flv1.4r",
+ MaxGB: 500,
+ Memory: 4,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.5 replica",
+ Id: "flv1.5r",
+ MaxGB: 500,
+ Memory: 5,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 1,
+ Description: "flavor 1.6 replica",
+ Id: "flv1.6r",
+ MaxGB: 500,
+ Memory: 6,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ /* ......................................................... */
+ {
+ Cpu: 2,
+ Description: "flavor 2.1 replica",
+ Id: "flv2.1r",
+ MaxGB: 500,
+ Memory: 1,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.2 replica",
+ Id: "flv2.2r",
+ MaxGB: 500,
+ Memory: 2,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.3 replica",
+ Id: "flv2.3r",
+ MaxGB: 500,
+ Memory: 3,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.4 replica",
+ Id: "flv2.4r",
+ MaxGB: 500,
+ Memory: 4,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.5 replica",
+ Id: "flv2.5r",
+ MaxGB: 500,
+ Memory: 5,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ {
+ Cpu: 2,
+ Description: "flavor 2.6 replica",
+ Id: "flv2.6r",
+ MaxGB: 500,
+ Memory: 6,
+ MinGB: 5,
+ NodeType: "Replica",
+ StorageClasses: []testFlavorStorageClass{
+ {Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ {Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
+ },
+ },
+ /* ......................................................... */
+}
+
+//nolint:unused // TODO: remove when used
+func testFlavorListToResponseFlavorList(f []testFlavor) []postgresflex.ListFlavors {
+ result := make([]postgresflex.ListFlavors, len(f))
+ for i, flavor := range f {
+ result[i] = testFlavorToResponseFlavor(flavor)
}
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- resp := &postgresflex.GetInstanceResponse{
- ConnectionInfo: postgresflex.InstanceConnectionInfo{
- Write: postgresflex.InstanceConnectionInfoWrite{
- Host: tt.args.hostName,
- Port: int32(tt.args.port),
- },
- },
- }
+ return result
+}
- handleConnectionInfo(tt.args.ctx, tt.args.m, resp)
-
- if tt.args.hostName == "" || tt.args.port == 0 {
- if !tt.args.m.ConnectionInfo.IsNull() {
- t.Errorf("expected connection info to be null")
- }
- }
-
- if tt.args.hostName != "" && tt.args.port != 0 {
- res := tt.args.m.ConnectionInfo.Write.Attributes()
- gotHost := ""
- if r, ok := res["host"]; ok {
- gotHost = utils2.RemoveQuotes(r.String())
- }
- if gotHost != tt.args.hostName {
- t.Errorf("host value incorrect: want: %s - got: %s", tt.args.hostName, gotHost)
- }
-
- gotPort, ok := res["port"]
- if !ok {
- t.Errorf("could not find a value for port in connection_info.write")
- }
- if !gotPort.Equal(types.Int64Value(int64(tt.args.port))) {
- t.Errorf("port value incorrect: want: %d - got: %s", tt.args.port, gotPort.String())
- }
- }
- })
+//nolint:unused // TODO: remove when used
+func testFlavorToResponseFlavor(f testFlavor) postgresflex.ListFlavors {
+ var scList []postgresflex.FlavorStorageClassesStorageClass
+ for _, fl := range f.StorageClasses {
+ scList = append(
+ scList, postgresflex.FlavorStorageClassesStorageClass{
+ Class: utils.Ptr(fl.Class),
+ MaxIoPerSec: utils.Ptr(fl.MaxIoPerSec),
+ MaxThroughInMb: utils.Ptr(fl.MaxThroughInMb),
+ },
+ )
+ }
+ return postgresflex.ListFlavors{
+ Cpu: utils.Ptr(f.Cpu),
+ Description: utils.Ptr(f.Description),
+ Id: utils.Ptr(f.Id),
+ MaxGB: utils.Ptr(f.MaxGB),
+ Memory: utils.Ptr(f.Memory),
+ MinGB: utils.Ptr(f.MinGB),
+ NodeType: utils.Ptr(f.NodeType),
+ StorageClasses: &scList,
}
}
-func Test_handleEncryption(t *testing.T) {
- t.Skipf("please implement")
- type args struct {
- m *dataSourceModel
- resp *postgresflex.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- handleEncryption(tt.args.m, tt.args.resp)
- t.Logf("need to implement more")
- })
- }
-}
+// func Test_getAllFlavors(t *testing.T) {
+// type args struct {
+// projectId string
+// region string
+// }
+// tests := []struct {
+// name string
+// args args
+// firstItem int
+// lastItem int
+// want []postgresflex.ListFlavors
+// wantErr bool
+// }{
+// {
+// name: "find exactly one flavor",
+// args: args{
+// projectId: "project",
+// region: "region",
+// },
+// firstItem: 0,
+// lastItem: 0,
+// want: []postgresflex.ListFlavors{
+// testFlavorToResponseFlavor(responseList[0]),
+// },
+// wantErr: false,
+// },
+// {
+// name: "get exactly 1 page flavors",
+// args: args{
+// projectId: "project",
+// region: "region",
+// },
+// firstItem: 0,
+// lastItem: 9,
+// want: testFlavorListToResponseFlavorList(responseList[0:10]),
+// wantErr: false,
+// },
+// {
+// name: "get exactly 20 flavors",
+// args: args{
+// projectId: "project",
+// region: "region",
+// },
+// firstItem: 0,
+// lastItem: 20,
+// // 0 indexed therefore we want :21
+// want: testFlavorListToResponseFlavorList(responseList[0:21]),
+// wantErr: false,
+// },
+// {
+// name: "get all flavors",
+// args: args{
+// projectId: "project",
+// region: "region",
+// },
+// firstItem: 0,
+// lastItem: len(responseList),
+// want: testFlavorListToResponseFlavorList(responseList),
+// wantErr: false,
+// },
+// }
+// for _, tt := range tests {
+// t.Run(tt.name, func(t *testing.T) {
+// first := tt.firstItem
+// if first > len(responseList)-1 {
+// first = len(responseList) - 1
+// }
+// last := tt.lastItem
+// if last > len(responseList)-1 {
+// last = len(responseList) - 1
+// }
+// mockClient := postgresFlexClientMocked{
+// returnError: tt.wantErr,
+// firstItem: first,
+// lastItem: last,
+// }
+// got, err := getAllFlavors(context.TODO(), mockClient, tt.args.projectId, tt.args.region)
+// if (err != nil) != tt.wantErr {
+// t.Errorf("getAllFlavors() error = %v, wantErr %v", err, tt.wantErr)
+// return
+// }
+//
+// if diff := cmp.Diff(tt.want, got); diff != "" {
+// t.Errorf("mismatch (-want +got):\n%s", diff)
+// }
+//
+// if !reflect.DeepEqual(got, tt.want) {
+// t.Errorf("getAllFlavors() got = %v, want %v", got, tt.want)
+// }
+// })
+// }
+//}
-func Test_handleNetwork(t *testing.T) {
- t.Skipf("please implement")
- type args struct {
- ctx context.Context
- m *dataSourceModel
- resp *postgresflex.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if err := handleNetwork(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr {
- t.Errorf("handleNetwork() error = %v, wantErr %v", err, tt.wantErr)
- }
- })
- }
-}
-
-func Test_mapGetDataInstanceResponseToModel(t *testing.T) {
- t.Skipf("please implement")
- type args struct {
- ctx context.Context
- m *dataSourceModel
- resp *postgresflex.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if err := mapGetDataInstanceResponseToModel(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr {
- t.Errorf("mapGetDataInstanceResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
- }
- })
- }
-}
-
-func Test_mapGetInstanceResponseToModel(t *testing.T) {
- t.Skipf("please implement")
- type args struct {
- ctx context.Context
- m *postgresflexalpharesource.InstanceModel
- resp *postgresflex.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- if err := mapGetInstanceResponseToModel(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr {
- t.Errorf("mapGetInstanceResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
- }
- })
- }
-}
+// func Test_loadFlavorId(t *testing.T) {
+// type args struct {
+// ctx context.Context
+// model *Model
+// storage *storageModel
+// }
+// tests := []struct {
+// name string
+// args args
+// firstItem int
+// lastItem int
+// want []postgresflex.ListFlavors
+// wantErr bool
+// }{
+// {
+// name: "find a single flavor",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: 3,
+// want: []postgresflex.ListFlavors{
+// testFlavorToResponseFlavor(responseList[0]),
+// },
+// wantErr: false,
+// },
+// {
+// name: "find a single flavor by replicas option",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// Replicas: basetypes.NewInt64Value(1),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: 3,
+// want: []postgresflex.ListFlavors{
+// testFlavorToResponseFlavor(responseList[0]),
+// },
+// wantErr: false,
+// },
+// {
+// name: "fail finding find a single flavor by replicas option",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// Replicas: basetypes.NewInt64Value(1),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 13,
+// lastItem: 23,
+// want: []postgresflex.ListFlavors{},
+// wantErr: true,
+// },
+// {
+// name: "find a replicas flavor lower case",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: len(responseList) - 1,
+// want: []postgresflex.ListFlavors{
+// testFlavorToResponseFlavor(responseList[16]),
+// },
+// wantErr: false,
+// },
+// {
+// name: "find a replicas flavor CamelCase",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: len(responseList) - 1,
+// want: []postgresflex.ListFlavors{
+// testFlavorToResponseFlavor(responseList[16]),
+// },
+// wantErr: false,
+// },
+// {
+// name: "find a replicas flavor by replicas option",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// Replicas: basetypes.NewInt64Value(3),
+// },
+// flavor: &flavorModel{
+// CPU: basetypes.NewInt64Value(1),
+// RAM: basetypes.NewInt64Value(1),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: len(responseList) - 1,
+// want: []postgresflex.ListFlavors{
+// testFlavorToResponseFlavor(responseList[16]),
+// },
+// wantErr: false,
+// },
+// {
+// name: "fail finding a replica flavor",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// Replicas: basetypes.NewInt64Value(3),
+// },
+// flavor: &flavorModel{
+// CPU: basetypes.NewInt64Value(1),
+// RAM: basetypes.NewInt64Value(1),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: 10,
+// want: []postgresflex.ListFlavors{},
+// wantErr: true,
+// },
+// {
+// name: "no flavor found error",
+// args: args{
+// ctx: context.Background(),
+// model: &Model{
+// ProjectId: basetypes.NewStringValue("project"),
+// Region: basetypes.NewStringValue("region"),
+// },
+// flavor: &flavorModel{
+// CPU: basetypes.NewInt64Value(10),
+// RAM: basetypes.NewInt64Value(1000),
+// NodeType: basetypes.NewStringValue("Single"),
+// },
+// storage: &storageModel{
+// Class: basetypes.NewStringValue("sc1"),
+// Size: basetypes.NewInt64Value(100),
+// },
+// },
+// firstItem: 0,
+// lastItem: 3,
+// want: []postgresflex.ListFlavors{},
+// wantErr: true,
+// },
+// }
+// for _, tt := range tests {
+// t.Run(tt.name, func(t *testing.T) {
+// first := tt.firstItem
+// if first > len(responseList)-1 {
+// first = len(responseList) - 1
+// }
+// last := tt.lastItem
+// if last > len(responseList)-1 {
+// last = len(responseList) - 1
+// }
+// mockClient := postgresFlexClientMocked{
+// returnError: tt.wantErr,
+// firstItem: first,
+// lastItem: last,
+// }
+// if err := loadFlavorId(tt.args.ctx, mockClient, tt.args.model, tt.args.flavor, tt.args.storage); (err != nil) != tt.wantErr {
+// t.Errorf("loadFlavorId() error = %v, wantErr %v", err, tt.wantErr)
+// }
+// })
+// }
+//}
diff --git a/stackit/internal/services/postgresflexalpha/instance/resource.go b/stackit/internal/services/postgresflexalpha/instance/resource.go
index d07bf546..f061f8bf 100644
--- a/stackit/internal/services/postgresflexalpha/instance/resource.go
+++ b/stackit/internal/services/postgresflexalpha/instance/resource.go
@@ -14,9 +14,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
@@ -25,6 +23,8 @@ import (
wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
)
+const packageName = "postgresflexalpha"
+
// Ensure the implementation satisfies the expected interfaces.
var (
_ resource.Resource = &instanceResource{}
@@ -40,8 +40,11 @@ func NewInstanceResource() resource.Resource {
return &instanceResource{}
}
-// resourceModel describes the resource data model.
-type resourceModel = postgresflexalpha.InstanceModel
+// instanceResource is the resource implementation.
+type instanceResource struct {
+ client *postgresflex.APIClient
+ providerData core.ProviderData
+}
type InstanceResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"`
@@ -49,18 +52,8 @@ type InstanceResourceIdentityModel struct {
InstanceID types.String `tfsdk:"instance_id"`
}
-// instanceResource is the resource implementation.
-type instanceResource struct {
- client *v3alpha1api.APIClient
- providerData core.ProviderData
-}
-
-func (r *instanceResource) ValidateConfig(
- ctx context.Context,
- req resource.ValidateConfigRequest,
- resp *resource.ValidateConfigResponse,
-) {
- var data resourceModel
+func (r *instanceResource) ValidateConfig(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) {
+ var data postgresflexalpha.InstanceModel
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
if resp.Diagnostics.HasError() {
@@ -79,12 +72,8 @@ func (r *instanceResource) ValidateConfig(
// ModifyPlan implements resource.ResourceWithModifyPlan.
// Use the modifier to set the effective region in the current plan.
-func (r *instanceResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
+func (r *instanceResource) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { // nolint:gocritic // function signature required by Terraform
+ var configModel postgresflexalpha.InstanceModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -94,7 +83,7 @@ func (r *instanceResource) ModifyPlan(
return
}
- var planModel resourceModel
+ var planModel postgresflexalpha.InstanceModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -146,13 +135,13 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource.
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
schema := postgresflexalpha.InstanceResourceSchema(ctx)
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
+ fields, err := postgresflexUtils.ReadModifiersConfig(modifiersFileByte)
if err != nil {
resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
return
}
- err = utils.AddPlanModifiersToResourceSchema(fields, &schema)
+ err = postgresflexUtils.AddPlanModifiersToResourceSchema(fields, &schema)
if err != nil {
resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
return
@@ -160,11 +149,7 @@ func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest,
resp.Schema = schema
}
-func (r *instanceResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
+func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.IdentitySchemaRequest, resp *resource.IdentitySchemaResponse) {
resp.IdentitySchema = identityschema.Schema{
Attributes: map[string]identityschema.Attribute{
"project_id": identityschema.StringAttribute{
@@ -186,7 +171,7 @@ func (r *instanceResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model postgresflexalpha.InstanceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
@@ -196,9 +181,9 @@ func (r *instanceResource) Create(
ctx = core.InitProviderContext(ctx)
- projectID := model.ProjectId.ValueString()
+ projectId := model.ProjectId.ValueString()
region := model.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectID)
+ ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
var netAcl []string
@@ -208,22 +193,22 @@ func (r *instanceResource) Create(
return
}
- replVal := model.Replicas.ValueInt64() // nolint:gosec // check is performed above
+ if model.Replicas.ValueInt64() > math.MaxInt32 {
+ resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
+ return
+ }
+ replVal := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
payload := modelToCreateInstancePayload(netAcl, model, replVal)
// Create new instance
- createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
- ctx,
- projectID,
- region,
- ).CreateInstanceRequestPayload(payload).Execute()
+ createResp, err := r.client.CreateInstanceRequest(ctx, projectId, region).CreateInstanceRequestPayload(payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", fmt.Sprintf("Calling API: %v", err))
return
}
ctx = core.LogResponse(ctx)
- instanceID, ok := createResp.GetIdOk()
+ instanceId, ok := createResp.GetIdOk()
if !ok {
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", "could not find instance id in response")
return
@@ -231,35 +216,24 @@ func (r *instanceResource) Create(
// Set data returned by API in identity
identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectID),
+ ProjectID: types.StringValue(projectId),
Region: types.StringValue(region),
- InstanceID: types.StringPointerValue(instanceID),
+ InstanceID: types.StringValue(instanceId),
}
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() {
return
}
- waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectID, region, *instanceID).
- WaitWithContext(ctx)
+ waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating instance",
- fmt.Sprintf("Wait handler error: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Wait handler error: %v", err))
return
}
err = mapGetInstanceResponseToModel(ctx, &model, waitResp)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating instance",
- fmt.Sprintf("Error creating model: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Error creating model: %v", err))
return
}
@@ -272,77 +246,100 @@ func (r *instanceResource) Create(
tflog.Info(ctx, "Postgres Flex instance created")
}
-func modelToCreateInstancePayload(
- netACL []string,
- model postgresflexalpha.InstanceModel,
- replVal int64,
-) v3alpha1api.CreateInstanceRequestPayload {
- var enc *v3alpha1api.InstanceEncryption
+func modelToCreateInstancePayload(netAcl []string, model postgresflexalpha.InstanceModel, replVal int32) postgresflex.CreateInstanceRequestPayload {
+ var enc *postgresflex.InstanceEncryption
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
- enc = &v3alpha1api.InstanceEncryption{
- KekKeyId: model.Encryption.KekKeyId.ValueString(),
- KekKeyRingId: model.Encryption.KekKeyRingId.ValueString(),
- KekKeyVersion: model.Encryption.KekKeyVersion.ValueString(),
- ServiceAccount: model.Encryption.ServiceAccount.ValueString(),
+ enc = &postgresflex.InstanceEncryption{
+ KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
+ KekKeyRingId: model.Encryption.KekKeyRingId.ValueStringPointer(),
+ KekKeyVersion: model.Encryption.KekKeyVersion.ValueStringPointer(),
+ ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
}
}
- payload := v3alpha1api.CreateInstanceRequestPayload{
- BackupSchedule: model.BackupSchedule.ValueString(),
+ payload := postgresflex.CreateInstanceRequestPayload{
+ BackupSchedule: model.BackupSchedule.ValueStringPointer(),
Encryption: enc,
- FlavorId: model.FlavorId.ValueString(),
- Name: model.Name.ValueString(),
- Network: v3alpha1api.InstanceNetworkCreate{
- AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer()),
- Acl: netACL,
+ FlavorId: model.FlavorId.ValueStringPointer(),
+ Name: model.Name.ValueStringPointer(),
+ Network: &postgresflex.InstanceNetworkCreate{
+ AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(
+ model.Network.AccessScope.ValueStringPointer(),
+ ),
+ Acl: &netAcl,
},
- Replicas: v3alpha1api.Replicas(replVal), //nolint:gosec // TODO
- RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // TODO
- Storage: v3alpha1api.StorageCreate{
- PerformanceClass: model.Storage.PerformanceClass.ValueString(),
- Size: int32(model.Storage.Size.ValueInt64()), //nolint:gosec // TODO
+ Replicas: postgresflex.CreateInstanceRequestPayloadGetReplicasAttributeType(&replVal),
+ RetentionDays: model.RetentionDays.ValueInt64Pointer(),
+ Storage: &postgresflex.StorageCreate{
+ PerformanceClass: model.Storage.PerformanceClass.ValueStringPointer(),
+ Size: model.Storage.Size.ValueInt64Pointer(),
},
- Version: model.Version.ValueString(),
+ Version: model.Version.ValueStringPointer(),
}
return payload
}
// Read refreshes the Terraform state with the latest data.
-func (r *instanceResource) Read(
- ctx context.Context,
- req resource.ReadRequest,
- resp *resource.ReadResponse,
-) { // nolint:gocritic // function signature required by Terraform
+func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
functionErrorSummary := "read instance failed"
- var model resourceModel
+ var model postgresflexalpha.InstanceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
+ // Read identity data
+ var identityData InstanceResourceIdentityModel
+ resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
ctx = core.InitProviderContext(ctx)
+ // projectId := model.ProjectId.ValueString()
+ // region := r.providerData.GetRegionWithOverride(model.Region)
+ // instanceId := model.InstanceId.ValueString()
+
var projectId string
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
projectId = model.ProjectId.ValueString()
+ } else {
+ if identityData.ProjectID.IsNull() || identityData.ProjectID.IsUnknown() {
+ core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, "project_id not found in config")
+ return
+ }
+ projectId = identityData.ProjectID.ValueString()
}
var region string
if !model.Region.IsNull() && !model.Region.IsUnknown() {
region = r.providerData.GetRegionWithOverride(model.Region)
+ } else {
+ if identityData.Region.IsNull() || identityData.Region.IsUnknown() {
+ core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, "region not found in config")
+ return
+ }
+ region = r.providerData.GetRegionWithOverride(identityData.Region)
}
var instanceId string
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
instanceId = model.InstanceId.ValueString()
+ } else {
+ if identityData.InstanceID.IsNull() || identityData.InstanceID.IsUnknown() {
+ core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, "instance_id not found in config")
+ return
+ }
+ instanceId = identityData.InstanceID.ValueString()
}
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode == http.StatusNotFound {
@@ -361,7 +358,7 @@ func (r *instanceResource) Read(
return
}
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
- if *respInstanceID != instanceId {
+ if respInstanceID != instanceId {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -374,12 +371,7 @@ func (r *instanceResource) Read(
err = mapGetInstanceResponseToModel(ctx, &model, instanceResp)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- functionErrorSummary,
- fmt.Sprintf("Processing API payload: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, functionErrorSummary, fmt.Sprintf("Processing API payload: %v", err))
return
}
@@ -404,12 +396,8 @@ func (r *instanceResource) Read(
}
// Update updates the resource and sets the updated Terraform state on success.
-func (r *instanceResource) Update(
- ctx context.Context,
- req resource.UpdateRequest,
- resp *resource.UpdateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { // nolint:gocritic // function signature required by Terraform
+ var model postgresflexalpha.InstanceModel
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
@@ -426,56 +414,59 @@ func (r *instanceResource) Update(
return
}
- projectID := identityData.ProjectID.ValueString()
- instanceID := identityData.InstanceID.ValueString()
+ //if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() {
+ // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "instanceId is null or unknown")
+ // return
+ //}
+ //
+ //if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() {
+ // core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "projectId is null or unknown")
+ // return
+ //}
+
+ //projectId := model.ProjectId.ValueString()
+ //instanceId := model.InstanceId.ValueString()
+ projectId := identityData.ProjectID.ValueString()
+ instanceId := identityData.InstanceID.ValueString()
region := model.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectID)
- ctx = tflog.SetField(ctx, "instance_id", instanceID)
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- var netACL []string
- diag := model.Network.Acl.ElementsAs(ctx, &netACL, false)
+ var netAcl []string
+ diag := model.Network.Acl.ElementsAs(ctx, &netAcl, false)
resp.Diagnostics.Append(diags...)
if diag.HasError() {
return
}
if model.Replicas.ValueInt64() > math.MaxInt32 {
- core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "replicas value too large for int32")
+ resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
return
}
+ replInt32 := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
- if model.RetentionDays.ValueInt64() > math.MaxInt32 {
- core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "retention_days value too large for int32")
- return
- }
-
- if model.Storage.Size.ValueInt64() > math.MaxInt32 {
- core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "storage.size value too large for int32")
- return
- }
-
- payload := v3alpha1api.UpdateInstanceRequestPayload{
- BackupSchedule: model.BackupSchedule.ValueString(),
- FlavorId: model.FlavorId.ValueString(),
- Name: model.Name.ValueString(),
- Network: v3alpha1api.InstanceNetworkUpdate{
- Acl: netACL,
+ payload := postgresflex.UpdateInstanceRequestPayload{
+ BackupSchedule: model.BackupSchedule.ValueStringPointer(),
+ FlavorId: model.FlavorId.ValueStringPointer(),
+ Name: model.Name.ValueStringPointer(),
+ Network: &postgresflex.InstanceNetworkUpdate{
+ Acl: &netAcl,
},
- Replicas: v3alpha1api.Replicas(model.Replicas.ValueInt64()), //nolint:gosec // checked above
- RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // checked above
- Storage: v3alpha1api.StorageUpdate{
- Size: coreUtils.Ptr(int32(model.Storage.Size.ValueInt64())), //nolint:gosec // checked above
+ Replicas: postgresflex.UpdateInstanceRequestPayloadGetReplicasAttributeType(&replInt32),
+ RetentionDays: model.RetentionDays.ValueInt64Pointer(),
+ Storage: &postgresflex.StorageUpdate{
+ Size: model.Storage.Size.ValueInt64Pointer(),
},
- Version: model.Version.ValueString(),
+ Version: model.Version.ValueStringPointer(),
}
// Update existing instance
- err := r.client.DefaultAPI.UpdateInstanceRequest(
+ err := r.client.UpdateInstanceRequest(
ctx,
- projectID,
+ projectId,
region,
- instanceID,
+ instanceId,
).UpdateInstanceRequestPayload(payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error())
@@ -484,31 +475,15 @@ func (r *instanceResource) Update(
ctx = core.LogResponse(ctx)
- waitResp, err := wait.PartialUpdateInstanceWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectID,
- region,
- instanceID,
- ).WaitWithContext(ctx)
+ waitResp, err := wait.PartialUpdateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).WaitWithContext(ctx)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error updating instance",
- fmt.Sprintf("Instance update waiting: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", fmt.Sprintf("Instance update waiting: %v", err))
return
}
err = mapGetInstanceResponseToModel(ctx, &model, waitResp)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error updating instance",
- fmt.Sprintf("Processing API payload: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", fmt.Sprintf("Processing API payload: %v", err))
return
}
@@ -521,12 +496,8 @@ func (r *instanceResource) Update(
}
// Delete deletes the resource and removes the Terraform state on success.
-func (r *instanceResource) Delete(
- ctx context.Context,
- req resource.DeleteRequest,
- resp *resource.DeleteResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { // nolint:gocritic // function signature required by Terraform
+ var model postgresflexalpha.InstanceModel
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
@@ -544,7 +515,7 @@ func (r *instanceResource) Delete(
ctx = tflog.SetField(ctx, "region", region)
// Delete existing instance
- err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
return
@@ -552,7 +523,7 @@ func (r *instanceResource) Delete(
ctx = core.LogResponse(ctx)
- _, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ _, err = r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode != http.StatusNotFound {
@@ -567,24 +538,16 @@ func (r *instanceResource) Delete(
// ImportState imports a resource into the Terraform state on success.
// The expected format of the resource import identifier is: project_id,region,instance_id
-func (r *instanceResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
+func (r *instanceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
ctx = core.InitProviderContext(ctx)
if req.ID != "" {
idParts := strings.Split(req.ID, core.Separator)
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
+ core.LogAndAddError(ctx, &resp.Diagnostics,
"Error importing instance",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
- req.ID,
- ),
+ fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID),
)
return
}
@@ -595,20 +558,25 @@ func (r *instanceResource) ImportState(
return
}
- // If no ID is provided, attempt to read identity attributes from the import configuration
var identityData InstanceResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
+ resp.Diagnostics.Append(
+ resp.State.SetAttribute(
+ ctx,
+ path.Root("id"),
+ utils.BuildInternalTerraformId(
+ identityData.ProjectID.ValueString(),
+ identityData.Region.ValueString(),
+ identityData.InstanceID.ValueString(),
+ ),
+ )...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), identityData.ProjectID.ValueString())...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), identityData.Region.ValueString())...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), identityData.InstanceID.ValueString())...)
tflog.Info(ctx, "Postgres Flex instance state imported")
}
diff --git a/stackit/internal/services/postgresflexalpha/instance/resource_test.go b/stackit/internal/services/postgresflexalpha/instance/resource_test.go
new file mode 100644
index 00000000..46d935a5
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/instance/resource_test.go
@@ -0,0 +1,40 @@
+package postgresflexalpha
+
+import (
+ "reflect"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+// type postgresFlexClientMocked struct {
+// returnError bool
+// getFlavorsResp *postgresflex.GetFlavorsResponse
+// }
+//
+// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) {
+// if c.returnError {
+// return nil, fmt.Errorf("get flavors failed")
+// }
+//
+// return c.getFlavorsResp, nil
+// }
+
+func TestNewInstanceResource(t *testing.T) {
+ tests := []struct {
+ name string
+ want resource.Resource
+ }{
+ {
+ name: "create empty instance resource",
+ want: &instanceResource{},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go
index 7d7969a6..35d31cbc 100644
--- a/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/instance/resources_gen/instance_resource_gen.go
@@ -30,32 +30,20 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
},
"backup_schedule": schema.StringAttribute{
Required: true,
- Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
- MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
+ Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
+ MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
},
"connection_info": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
- "write": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- Description: "The host of the instance.",
- MarkdownDescription: "The host of the instance.",
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: "The port of the instance.",
- MarkdownDescription: "The port of the instance.",
- },
- },
- CustomType: WriteType{
- ObjectType: types.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- },
- },
+ "host": schema.StringAttribute{
Computed: true,
- Description: "The DNS name and port in the instance overview",
- MarkdownDescription: "The DNS name and port in the instance overview",
+ Description: "The host of the instance.",
+ MarkdownDescription: "The host of the instance.",
+ },
+ "port": schema.Int64Attribute{
+ Computed: true,
+ Description: "The port of the instance.",
+ MarkdownDescription: "The port of the instance.",
},
},
CustomType: ConnectionInfoType{
@@ -64,8 +52,8 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
},
},
Computed: true,
- Description: "The connection information of the instance",
- MarkdownDescription: "The connection information of the instance",
+ Description: "The DNS name and port in the instance overview",
+ MarkdownDescription: "The DNS name and port in the instance overview",
},
"encryption": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
@@ -275,22 +263,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
attributes := in.Attributes()
- writeAttribute, ok := attributes["write"]
+ hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
- `write is missing from object`)
+ `host is missing from object`)
return nil, diags
}
- writeVal, ok := writeAttribute.(basetypes.ObjectValue)
+ hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
+ fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
+ }
+
+ portAttribute, ok := attributes["port"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `port is missing from object`)
+
+ return nil, diags
+ }
+
+ portVal, ok := portAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -298,7 +304,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
}
return ConnectionInfoValue{
- Write: writeVal,
+ Host: hostVal,
+ Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
@@ -366,22 +373,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
return NewConnectionInfoValueUnknown(), diags
}
- writeAttribute, ok := attributes["write"]
+ hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
- `write is missing from object`)
+ `host is missing from object`)
return NewConnectionInfoValueUnknown(), diags
}
- writeVal, ok := writeAttribute.(basetypes.ObjectValue)
+ hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
- fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
+ fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
+ }
+
+ portAttribute, ok := attributes["port"]
+
+ if !ok {
+ diags.AddError(
+ "Attribute Missing",
+ `port is missing from object`)
+
+ return NewConnectionInfoValueUnknown(), diags
+ }
+
+ portVal, ok := portAttribute.(basetypes.Int64Value)
+
+ if !ok {
+ diags.AddError(
+ "Attribute Wrong Type",
+ fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
@@ -389,7 +414,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
}
return ConnectionInfoValue{
- Write: writeVal,
+ Host: hostVal,
+ Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
@@ -462,401 +488,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
type ConnectionInfoValue struct {
- Write basetypes.ObjectValue `tfsdk:"write"`
- state attr.ValueState
-}
-
-func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 1)
-
- var val tftypes.Value
- var err error
-
- attrTypes["write"] = basetypes.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- }.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 1)
-
- val, err = v.Write.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["write"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v ConnectionInfoValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v ConnectionInfoValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v ConnectionInfoValue) String() string {
- return "ConnectionInfoValue"
-}
-
-func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- var write basetypes.ObjectValue
-
- if v.Write.IsNull() {
- write = types.ObjectNull(
- WriteValue{}.AttributeTypes(ctx),
- )
- }
-
- if v.Write.IsUnknown() {
- write = types.ObjectUnknown(
- WriteValue{}.AttributeTypes(ctx),
- )
- }
-
- if !v.Write.IsNull() && !v.Write.IsUnknown() {
- write = types.ObjectValueMust(
- WriteValue{}.AttributeTypes(ctx),
- v.Write.Attributes(),
- )
- }
-
- attributeTypes := map[string]attr.Type{
- "write": basetypes.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- },
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "write": write,
- })
-
- return objVal, diags
-}
-
-func (v ConnectionInfoValue) Equal(o attr.Value) bool {
- other, ok := o.(ConnectionInfoValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Write.Equal(other.Write) {
- return false
- }
-
- return true
-}
-
-func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
- return ConnectionInfoType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "write": basetypes.ObjectType{
- AttrTypes: WriteValue{}.AttributeTypes(ctx),
- },
- }
-}
-
-var _ basetypes.ObjectTypable = WriteType{}
-
-type WriteType struct {
- basetypes.ObjectType
-}
-
-func (t WriteType) Equal(o attr.Type) bool {
- other, ok := o.(WriteType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t WriteType) String() string {
- return "WriteType"
-}
-
-func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- hostAttribute, ok := attributes["host"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `host is missing from object`)
-
- return nil, diags
- }
-
- hostVal, ok := hostAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
- }
-
- portAttribute, ok := attributes["port"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `port is missing from object`)
-
- return nil, diags
- }
-
- portVal, ok := portAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return WriteValue{
- Host: hostVal,
- Port: portVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewWriteValueNull() WriteValue {
- return WriteValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewWriteValueUnknown() WriteValue {
- return WriteValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing WriteValue Attribute Value",
- "While creating a WriteValue value, a missing attribute value was detected. "+
- "A WriteValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid WriteValue Attribute Type",
- "While creating a WriteValue value, an invalid attribute value was detected. "+
- "A WriteValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra WriteValue Attribute Value",
- "While creating a WriteValue value, an extra attribute value was detected. "+
- "A WriteValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewWriteValueUnknown(), diags
- }
-
- hostAttribute, ok := attributes["host"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `host is missing from object`)
-
- return NewWriteValueUnknown(), diags
- }
-
- hostVal, ok := hostAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
- }
-
- portAttribute, ok := attributes["port"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `port is missing from object`)
-
- return NewWriteValueUnknown(), diags
- }
-
- portVal, ok := portAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
- }
-
- if diags.HasError() {
- return NewWriteValueUnknown(), diags
- }
-
- return WriteValue{
- Host: hostVal,
- Port: portVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
- object, diags := NewWriteValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewWriteValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewWriteValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewWriteValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t WriteType) ValueType(ctx context.Context) attr.Value {
- return WriteValue{}
-}
-
-var _ basetypes.ObjectValuable = WriteValue{}
-
-type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState
}
-func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
+func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 2)
var val tftypes.Value
@@ -901,19 +538,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
}
}
-func (v WriteValue) IsNull() bool {
+func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull
}
-func (v WriteValue) IsUnknown() bool {
+func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown
}
-func (v WriteValue) String() string {
- return "WriteValue"
+func (v ConnectionInfoValue) String() string {
+ return "ConnectionInfoValue"
}
-func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
+func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{
@@ -939,8 +576,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
return objVal, diags
}
-func (v WriteValue) Equal(o attr.Value) bool {
- other, ok := o.(WriteValue)
+func (v ConnectionInfoValue) Equal(o attr.Value) bool {
+ other, ok := o.(ConnectionInfoValue)
if !ok {
return false
@@ -965,15 +602,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
return true
}
-func (v WriteValue) Type(ctx context.Context) attr.Type {
- return WriteType{
+func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
+ return ConnectionInfoType{
basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx),
},
}
}
-func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
+func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"host": basetypes.StringType{},
"port": basetypes.Int64Type{},
diff --git a/stackit/internal/services/postgresflexalpha/instance/schema_test.go b/stackit/internal/services/postgresflexalpha/instance/schema_test.go
new file mode 100644
index 00000000..ec567d75
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/instance/schema_test.go
@@ -0,0 +1,33 @@
+package postgresflexalpha
+
+import (
+ "context"
+ "testing"
+
+ // The fwresource import alias is so there is no collision
+ // with the more typical acceptance testing import:
+ // "github.com/hashicorp/terraform-plugin-testing/helper/resource"
+ fwresource "github.com/hashicorp/terraform-plugin-framework/resource"
+)
+
+func TestInstanceResourceSchema(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+ schemaRequest := fwresource.SchemaRequest{}
+ schemaResponse := &fwresource.SchemaResponse{}
+
+ // Instantiate the resource.Resource and call its Schema method
+ NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
+
+ if schemaResponse.Diagnostics.HasError() {
+ t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
+ }
+
+ // Validate the schema
+ diagnostics := schemaResponse.Schema.ValidateImplementation(ctx)
+
+ if diagnostics.HasError() {
+ t.Fatalf("Schema validation diagnostics: %+v", diagnostics)
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/main.go b/stackit/internal/services/postgresflexalpha/main.go
new file mode 100644
index 00000000..5e20f208
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/main.go
@@ -0,0 +1 @@
+package postgresflexalpha
diff --git a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
index b5707376..a2920107 100644
--- a/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
+++ b/stackit/internal/services/postgresflexalpha/postgresflex_acc_test.go
@@ -1,1206 +1,573 @@
+// Copyright (c) STACKIT
+
package postgresflexalpha_test
import (
"context"
_ "embed"
"fmt"
- "log"
- "math"
- "os"
- "strconv"
"strings"
"testing"
- "time"
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/terraform"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ "github.com/stackitcloud/stackit-sdk-go/core/config"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- postgresflexalphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
- // The fwresource import alias is so there is no collision
- // with the more typical acceptance testing import:
- // "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- fwresource "github.com/hashicorp/terraform-plugin-framework/resource"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
-const pfx = "stackitprivatepreview_postgresflexalpha"
+var (
+ //go:embed testdata/resource-complete.tf
+ resourceSecurityGroupMinConfig string //nolint:unused // needs implementation
+)
-func TestInstanceResourceSchema(t *testing.T) {
- // t.Parallel()
+// Instance resource data
+var instanceResource = map[string]string{
+ "project_id": testutil.ProjectId,
+ "name": fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum)),
+ "acl": "192.168.0.0/16",
+ "backup_schedule": "00 16 * * *",
+ "backup_schedule_updated": "00 12 * * *",
+ "flavor_cpu": "2",
+ "flavor_ram": "4",
+ "flavor_description": "Small, Compute optimized",
+ "replicas": "1",
+ "storage_class": "premium-perf12-stackit",
+ "storage_size": "5",
+ "version": "14",
+ "flavor_id": "2.4",
+}
- ctx := context.Background()
- schemaRequest := fwresource.SchemaRequest{}
- schemaResponse := &fwresource.SchemaResponse{}
+// User resource data
+var userResource = map[string]string{
+ "username": fmt.Sprintf("tfaccuser%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlpha)),
+ "role": "createdb",
+ "project_id": instanceResource["project_id"],
+}
- // Instantiate the resource.Resource and call its Schema method
- postgresflexalphaInstance.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
+// Database resource data
+var databaseResource = map[string]string{
+ "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)),
+}
- if schemaResponse.Diagnostics.HasError() {
- t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
+func configResources(backupSchedule string, region *string) string {
+ var regionConfig string
+ if region != nil {
+ regionConfig = fmt.Sprintf(`region = %q`, *region)
}
+ return fmt.Sprintf(
+ `
+ %s
- // Validate the schema
- diagnostics := schemaResponse.Schema.ValidateImplementation(ctx)
+ resource "stackit_postgresflex_instance" "instance" {
+ project_id = "%s"
+ name = "%s"
+ acl = ["%s"]
+ backup_schedule = "%s"
+ flavor = {
+ cpu = %s
+ ram = %s
+ }
+ replicas = %s
+ storage = {
+ class = "%s"
+ size = %s
+ }
+ version = "%s"
+ %s
+ }
- if diagnostics.HasError() {
- t.Fatalf("Schema validation diagnostics: %+v", diagnostics)
- }
+ resource "stackit_postgresflex_user" "user" {
+ project_id = stackit_postgresflex_instance.instance.project_id
+ instance_id = stackit_postgresflex_instance.instance.instance_id
+ username = "%s"
+ roles = ["%s"]
+ }
+
+ resource "stackit_postgresflex_database" "database" {
+ project_id = stackit_postgresflex_instance.instance.project_id
+ instance_id = stackit_postgresflex_instance.instance.instance_id
+ name = "%s"
+ owner = stackit_postgresflex_user.user.username
+ }
+ `,
+ testutil.PostgresFlexProviderConfig(),
+ instanceResource["project_id"],
+ instanceResource["name"],
+ instanceResource["acl"],
+ backupSchedule,
+ instanceResource["flavor_cpu"],
+ instanceResource["flavor_ram"],
+ instanceResource["replicas"],
+ instanceResource["storage_class"],
+ instanceResource["storage_size"],
+ instanceResource["version"],
+ regionConfig,
+ userResource["username"],
+ userResource["role"],
+ databaseResource["name"],
+ )
}
-func TestMain(m *testing.M) {
- testutils.Setup()
- code := m.Run()
- // shutdown()
- os.Exit(code)
-}
-
-func testAccPreCheck(t *testing.T) {
- if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok {
- t.Fatalf("could not find env var TF_ACC_PROJECT_ID")
- }
-}
-
-type resData struct {
- ServiceAccountFilePath string
- ProjectID string
- Region string
- Name string
- TfName string
- FlavorID string
- BackupSchedule string
- UseEncryption bool
- KekKeyID string
- KekKeyRingID string
- KekKeyVersion uint8
- KekServiceAccount string
- PerformanceClass string
- Replicas uint32
- Size uint32
- ACLString string
- AccessScope string
- RetentionDays uint32
- Version string
- Users []User
- Databases []Database
-}
-
-type User struct {
- Name string
- ProjectID string
- Roles []string
-}
-
-type Database struct {
- Name string
- ProjectID string
- Owner string
-}
-
-func getExample() resData {
- name := acctest.RandomWithPrefix("tf-acc")
- return resData{
- Region: os.Getenv("TF_ACC_REGION"),
- ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Name: name,
- TfName: name,
- FlavorID: "2.4",
- BackupSchedule: "0 0 * * *",
- UseEncryption: false,
- RetentionDays: 33,
- Replicas: 1,
- PerformanceClass: "premium-perf2-stackit",
- Size: 10,
- ACLString: "0.0.0.0/0",
- AccessScope: "PUBLIC",
- Version: "17",
- }
-}
-
-func TestAccInstance(t *testing.T) {
- exData := getExample()
-
- updNameData := exData
- updNameData.Name = "name-updated"
-
- updSizeData := exData
- updSizeData.Size = 25
-
- updBackupSched := updSizeData
- // api should complain about more than one daily backup
- updBackupSched.BackupSchedule = "30 3 * * *"
-
- /*
- {
- "backupSchedule": "6 6 * * *",
- "flavorId": "1.2",
- "name": "postgres-instance",
- "network": {
- "acl": [
- "198.51.100.0/24"
- ]
- },
- "replicas": 1,
- "retentionDays": 35,
- "storage": {
- "size": 10
- },
- "version": "string"
- }
- */
-
- testItemID := testutils.ResStr(pfx, "instance", exData.TfName)
-
- resource.ParallelTest(
+func TestAccPostgresFlexFlexResource(t *testing.T) {
+ resource.Test(
t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- },
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
CheckDestroy: testAccCheckPostgresFlexDestroy,
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
- // Create and verify
+ // Creation
{
- //PreConfig: func() {
- // //
- // },
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
+ Config: configResources(instanceResource["backup_schedule"], &testutil.Region),
Check: resource.ComposeAggregateTestCheckFunc(
- // check params acl count
- resource.TestCheckResourceAttr(testItemID, "acl.#", "1"),
-
- // check params are set
- resource.TestCheckResourceAttrSet(testItemID, "backup_schedule"),
-
- //// connection_info should contain 1 sub entry
- // resource.TestCheckResourceAttr(testItemID, "connection_info.%", "1"),
- //
- //// connection_info.write should contain 2 sub entries
- // resource.TestCheckResourceAttr(testItemID, "connection_info.write", "2"),
- //
- // resource.TestCheckResourceAttrSet(testItemID, "connection_info.write.host"),
- // resource.TestCheckResourceAttrSet(testItemID, "connection_info.write.port"),
-
- resource.TestCheckResourceAttrSet(testItemID, "flavor_id"),
- resource.TestCheckResourceAttrSet(testItemID, "id"),
- resource.TestCheckResourceAttrSet(testItemID, "instance_id"),
- resource.TestCheckResourceAttrSet(testItemID, "is_deletable"),
- resource.TestCheckResourceAttrSet(testItemID, "name"),
-
- // network should contain 4 sub entries
- resource.TestCheckResourceAttr(testItemID, "network.%", "4"),
-
- resource.TestCheckResourceAttrSet(testItemID, "network.access_scope"),
-
- // on unencrypted instances we expect this to be empty
- resource.TestCheckResourceAttr(testItemID, "network.instance_address", ""),
- resource.TestCheckResourceAttr(testItemID, "network.router_address", ""),
-
- // only one acl entry should be set
- resource.TestCheckResourceAttr(testItemID, "network.acl.#", "1"),
-
- resource.TestCheckResourceAttrSet(testItemID, "replicas"),
- resource.TestCheckResourceAttrSet(testItemID, "retention_days"),
- resource.TestCheckResourceAttrSet(testItemID, "status"),
-
- // storage should contain 2 sub entries
- resource.TestCheckResourceAttr(testItemID, "storage.%", "2"),
-
- resource.TestCheckResourceAttrSet(testItemID, "storage.performance_class"),
- resource.TestCheckResourceAttrSet(testItemID, "storage.size"),
- resource.TestCheckResourceAttrSet(testItemID, "version"),
-
- // check absent attr
- resource.TestCheckNoResourceAttr(testItemID, "encryption"),
- resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_id"),
- resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_ring_id"),
- resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_version"),
- resource.TestCheckNoResourceAttr(testItemID, "encryption.service_account"),
-
- // check param values
- resource.TestCheckResourceAttr(testItemID, "name", exData.Name),
- ),
- },
- // Update name and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updNameData,
- ),
- Check: resource.ComposeTestCheckFunc(
+ // Instance
resource.TestCheckResourceAttr(
- testutils.ResStr(pfx, "instance", exData.TfName),
+ "stackit_postgresflex_instance.instance",
+ "project_id",
+ instanceResource["project_id"],
+ ),
+ resource.TestCheckResourceAttrSet(
+ "stackit_postgresflex_instance.instance",
+ "instance_id",
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
"name",
- updNameData.Name,
+ instanceResource["name"],
),
- ),
- },
- // Update size and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updSizeData,
- ),
- Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
- testutils.ResStr(pfx, "instance", exData.TfName),
- "storage.size",
- strconv.Itoa(int(updSizeData.Size)),
+ "stackit_postgresflex_instance.instance",
+ "acl.#",
+ "1",
),
- ),
- },
- // Update backup schedule
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updBackupSched,
- ),
- Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
- testutils.ResStr(pfx, "instance", exData.TfName),
+ "stackit_postgresflex_instance.instance",
+ "acl.0",
+ instanceResource["acl"],
+ ),
+ resource.TestCheckResourceAttrSet(
+ "stackit_postgresflex_instance.instance",
+ "flavor.id",
+ ),
+ resource.TestCheckResourceAttrSet(
+ "stackit_postgresflex_instance.instance",
+ "flavor.description",
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
"backup_schedule",
- updBackupSched.BackupSchedule,
+ instanceResource["backup_schedule"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "flavor.cpu",
+ instanceResource["flavor_cpu"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "flavor.ram",
+ instanceResource["flavor_ram"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "replicas",
+ instanceResource["replicas"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "storage.class",
+ instanceResource["storage_class"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "storage.size",
+ instanceResource["storage_size"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "version",
+ instanceResource["version"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "region",
+ testutil.Region,
+ ),
+
+ // User
+ resource.TestCheckResourceAttrPair(
+ "stackit_postgresflex_user.user", "project_id",
+ "stackit_postgresflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "stackit_postgresflex_user.user", "instance_id",
+ "stackit_postgresflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrSet("stackit_postgresflex_user.user", "user_id"),
+ resource.TestCheckResourceAttrSet("stackit_postgresflex_user.user", "password"),
+
+ // Database
+ resource.TestCheckResourceAttrPair(
+ "stackit_postgresflex_database.database", "project_id",
+ "stackit_postgresflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "stackit_postgresflex_database.database", "instance_id",
+ "stackit_postgresflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_database.database",
+ "name",
+ databaseResource["name"],
+ ),
+ resource.TestCheckResourceAttrPair(
+ "stackit_postgresflex_database.database", "owner",
+ "stackit_postgresflex_user.user", "username",
),
),
},
- //// Import test
- //{
- // ResourceName: "example_resource.test",
- // ImportState: true,
- // ImportStateVerify: true,
- // },
- },
- },
- )
-}
-
-func TestAccInstanceWithUsers(t *testing.T) {
- data := getExample()
-
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"login"},
- },
- }
-
- resource.ParallelTest(
- t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- },
- CheckDestroy: testAccCheckPostgresFlexDestroy,
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
+ // data source
{
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
+ Config: fmt.Sprintf(
+ `
+ %s
+
+ data "stackit_postgresflex_instance" "instance" {
+ project_id = stackit_postgresflex_instance.instance.project_id
+ instance_id = stackit_postgresflex_instance.instance.instance_id
+ }
+
+ data "stackit_postgresflex_user" "user" {
+ project_id = stackit_postgresflex_instance.instance.project_id
+ instance_id = stackit_postgresflex_instance.instance.instance_id
+ user_id = stackit_postgresflex_user.user.user_id
+ }
+
+ data "stackit_postgresflex_database" "database" {
+ project_id = stackit_postgresflex_instance.instance.project_id
+ instance_id = stackit_postgresflex_instance.instance.instance_id
+ database_id = stackit_postgresflex_database.database.database_id
+ }
+ `,
+ configResources(instanceResource["backup_schedule"], nil),
),
Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance data
resource.TestCheckResourceAttr(
- testutils.ResStr(pfx, "instance", data.TfName),
- "name",
- data.Name,
+ "data.stackit_postgresflex_instance.instance",
+ "project_id",
+ instanceResource["project_id"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "name",
+ instanceResource["name"],
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_postgresflex_instance.instance", "project_id",
+ "stackit_postgresflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_postgresflex_instance.instance", "instance_id",
+ "stackit_postgresflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_postgresflex_user.user", "instance_id",
+ "stackit_postgresflex_user.user", "instance_id",
+ ),
+
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "acl.#",
+ "1",
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "acl.0",
+ instanceResource["acl"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "backup_schedule",
+ instanceResource["backup_schedule"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "flavor.id",
+ instanceResource["flavor_id"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "flavor.description",
+ instanceResource["flavor_description"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "flavor.cpu",
+ instanceResource["flavor_cpu"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "flavor.ram",
+ instanceResource["flavor_ram"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_instance.instance",
+ "replicas",
+ instanceResource["replicas"],
+ ),
+
+ // User data
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_user.user",
+ "project_id",
+ userResource["project_id"],
+ ),
+ resource.TestCheckResourceAttrSet(
+ "data.stackit_postgresflex_user.user",
+ "user_id",
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_user.user",
+ "username",
+ userResource["username"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_user.user",
+ "roles.#",
+ "1",
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_user.user",
+ "roles.0",
+ userResource["role"],
+ ),
+ resource.TestCheckResourceAttrSet(
+ "data.stackit_postgresflex_user.user",
+ "host",
+ ),
+ resource.TestCheckResourceAttrSet(
+ "data.stackit_postgresflex_user.user",
+ "port",
+ ),
+
+ // Database data
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_database.database",
+ "project_id",
+ instanceResource["project_id"],
+ ),
+ resource.TestCheckResourceAttr(
+ "data.stackit_postgresflex_database.database",
+ "name",
+ databaseResource["name"],
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_postgresflex_database.database",
+ "instance_id",
+ "stackit_postgresflex_instance.instance",
+ "instance_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_postgresflex_database.database",
+ "owner",
+ "data.stackit_postgresflex_user.user",
+ "username",
),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
),
},
- },
- },
- )
-}
-
-func TestAccInstanceWithDatabases(t *testing.T) {
- data := getExample()
-
- dbName := "testdb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"login"},
- },
- }
-
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- resource.ParallelTest(
- t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- },
- CheckDestroy: testAccCheckPostgresFlexDestroy,
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
+ // Import
{
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(
- testutils.ResStr(pfx, "instance", data.TfName),
- "name",
- data.Name,
- ),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"),
- ),
+ ResourceName: "stackit_postgresflex_instance.instance",
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_postgresflex_instance.instance"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_postgresflex_instance.instance")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+
+ return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"password"},
},
- },
- },
- )
-}
-
-func TestAccEncryptedInstanceWithDatabases(t *testing.T) {
- encKekKeyID, ok := os.LookupEnv("TF_ACC_KEK_KEY_ID")
- if !ok || encKekKeyID == "" {
- t.Skip("env var TF_ACC_KEK_KEY_ID needed for encryption test")
- }
-
- encKekKeyRingID, ok := os.LookupEnv("TF_ACC_KEK_KEY_RING_ID")
- if !ok || encKekKeyRingID == "" {
- t.Skip("env var TF_ACC_KEK_KEY_RING_ID needed for encryption test")
- }
-
- encKekKeyVersion, ok := os.LookupEnv("TF_ACC_KEK_KEY_VERSION")
- if !ok || encKekKeyVersion == "" {
- t.Skip("env var TF_ACC_KEK_KEY_VERSION needed for encryption test")
- }
-
- encSvcAcc, ok := os.LookupEnv("TF_ACC_KEK_SERVICE_ACCOUNT")
- if !ok || encSvcAcc == "" {
- t.Skip("env var TF_ACC_KEK_SERVICE_ACCOUNT needed for encryption test")
- }
-
- data := getExample()
- data.UseEncryption = true
- data.KekKeyID = encKekKeyID
- data.KekKeyRingID = encKekKeyRingID
- data.KekServiceAccount = encSvcAcc
- encKekKeyVersionInt, err := strconv.Atoi(encKekKeyVersion)
- if err != nil {
- t.Errorf("error converting string to int")
- }
- if encKekKeyVersionInt > math.MaxUint8 {
- t.Errorf("value too large to convert to uint8")
- }
- data.KekKeyVersion = uint8(encKekKeyVersionInt) //nolint:gosec // handled above
-
- dbName := "testdb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"login"},
- },
- }
-
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- resource.ParallelTest(
- t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- },
- CheckDestroy: testAccCheckPostgresFlexDestroy,
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
{
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
+ ResourceName: "stackit_postgresflex_user.user",
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_postgresflex_user.user"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_postgresflex_user.user")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+ userId, ok := r.Primary.Attributes["user_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute user_id")
+ }
+
+ return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"password", "uri"},
+ },
+ {
+ ResourceName: "stackit_postgresflex_database.database",
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_postgresflex_database.database"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_postgresflex_database.database")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+ databaseId, ok := r.Primary.Attributes["database_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute database_id")
+ }
+
+ return fmt.Sprintf(
+ "%s,%s,%s,%s",
+ testutil.ProjectId,
+ testutil.Region,
+ instanceId,
+ databaseId,
+ ), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ // Update
+ {
+ Config: configResources(instanceResource["backup_schedule_updated"], nil),
Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance data
resource.TestCheckResourceAttr(
- testutils.ResStr(pfx, "instance", data.TfName),
- "name",
- data.Name,
+ "stackit_postgresflex_instance.instance",
+ "project_id",
+ instanceResource["project_id"],
+ ),
+ resource.TestCheckResourceAttrSet(
+ "stackit_postgresflex_instance.instance",
+ "instance_id",
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "name",
+ instanceResource["name"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "acl.#",
+ "1",
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "acl.0",
+ instanceResource["acl"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "backup_schedule",
+ instanceResource["backup_schedule_updated"],
+ ),
+ resource.TestCheckResourceAttrSet(
+ "stackit_postgresflex_instance.instance",
+ "flavor.id",
+ ),
+ resource.TestCheckResourceAttrSet(
+ "stackit_postgresflex_instance.instance",
+ "flavor.description",
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "flavor.cpu",
+ instanceResource["flavor_cpu"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "flavor.ram",
+ instanceResource["flavor_ram"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "replicas",
+ instanceResource["replicas"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "storage.class",
+ instanceResource["storage_class"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "storage.size",
+ instanceResource["storage_size"],
+ ),
+ resource.TestCheckResourceAttr(
+ "stackit_postgresflex_instance.instance",
+ "version",
+ instanceResource["version"],
),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName),
- resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"),
),
},
+ // Deletion is done by the framework implicitly
},
},
)
}
-// func setupMockServer() *httptest.Server {
-// mux := http.NewServeMux()
-//
-// mux.HandleFunc("/api/resources", func(w http.ResponseWriter, r *http.Request) {
-// switch r.Method {
-// case http.MethodPost:
-// w.WriteHeader(http.StatusCreated)
-// err := json.NewEncoder(w).Encode(map[string]string{
-// "id": "mock-id-123",
-// "name": "test-resource",
-// })
-// if err != nil {
-// log.Fatalln(err)
-// }
-// case http.MethodGet:
-// w.WriteHeader(http.StatusOK)
-// err := json.NewEncoder(w).Encode([]map[string]string{})
-// if err != nil {
-// log.Fatalln(err)
-// }
-// }
-// })
-//
-// return httptest.NewServer(mux)
-//}
-//
-// func TestUnitResourceCreate(t *testing.T) {
-// server := setupMockServer()
-// defer server.Close()
-//
-// // Configure provider to use mock server URL
-// err := os.Setenv("API_ENDPOINT", server.URL)
-// if err != nil {
-// log.Fatalln(err)
-// }
-//
-// // Run unit tests against mock
-//}
-
-// func TestNewInstanceResource(t *testing.T) {
-// exData := resData{
-// Region: "eu01",
-// ServiceAccountFilePath: sa_file,
-// ProjectID: project_id,
-// Name: "testRes",
-// }
-// resource.ParallelTest(t, resource.TestCase{
-// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
-// Steps: []resource.TestStep{
-// {
-// Config: testAccResourceEncryptionExampleConfig(exData),
-// Check: resource.ComposeAggregateTestCheckFunc(
-// resource.TestCheckResourceAttr("example_resource.test", "name", exData.Name),
-// resource.TestCheckResourceAttrSet("example_resource.test", "id"),
-// ),
-// },
-// },
-// })
-//
-// //tests := []struct {
-// // name string
-// // want resource.Resource
-// //}{
-// // {
-// // name: "create empty instance resource",
-// // want: &instanceResource{},
-// // },
-// //}
-// //for _, tt := range tests {
-// // t.Run(tt.name, func(t *testing.T) {
-// // if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) {
-// // t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want)
-// // }
-// // })
-// //}
-//}
-
-//// Instance resource data
-// var instanceResource = map[string]string{
-// "project_id": testutils.ProjectId,
-// "region": "eu01",
-// "name": fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum)),
-// "acl": "192.168.0.0/16",
-// "backup_schedule": "00 16 * * *",
-// "backup_schedule_updated": "00 12 * * *",
-// "retention_days": "33",
-// "flavor_cpu": "2",
-// "flavor_ram": "4",
-// "flavor_description": "Small, Compute optimized",
-// "replicas": "1",
-// "storage_class": "premium-perf12-stackit",
-// "storage_size": "5",
-// "version": "14",
-// "flavor_id": "2.4",
-// "kek_key_id": "UUID1",
-// "kek_key_ring_id": "UUID2",
-// "kek_key_version": "1",
-// "service_account": "service@account.com",
-// "access_scope": "SNA",
-//}
-//
-//// User resource data
-// var userResource = map[string]string{
-// "username": fmt.Sprintf("tfaccuser%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlpha)),
-// "role": "createdb",
-// "project_id": testutils.ProjectId,
-//}
-//
-//// Database resource data
-// var databaseResource = map[string]string{
-// "name": fmt.Sprintf("tfaccdb%s", acctest.RandStringFromCharSet(4, acctest.CharSetAlphaNum)),
-// "project_id": testutils.ProjectId,
-//}
-//
-// func configResources(backupSchedule string, _ *string) string {
-// return fmt.Sprintf(
-// `
-// %s
-//
-//
-// resource "stackitprivatepreview_postgresflexalpha_instance" "instance" {
-// project_id = "%s"
-// region = "%s"
-// name = "%s"
-// backup_schedule = "%s"
-// retention_days = %s
-// flavor_id = %s
-// replicas = %s
-// storage = {
-// performance_class = "%s"
-// size = %s
-// }
-// encryption = {
-// kek_key_id = "%s"
-// kek_key_ring_id = "%s"
-// kek_key_version = "%s"
-// service_account = "%s"
-// }
-// network = {
-// acl = ["%s"]
-// access_scope = "%s"
-// }
-// version = %s
-// }
-//
-// resource "stackitprivatepreview_postgresflexalpha_user" "user" {
-// project_id = "%s"
-// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id
-// username = "%s"
-// roles = ["%s"]
-// }
-//
-// resource "stackitprivatepreview_postgresflexalpha_database" "database" {
-// project_id = "%s"
-// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id
-// name = "%s"
-// owner = stackitprivatepreview_postgresflexalpha_user.user.username
-// }
-// `,
-// testutils.PostgresFlexProviderConfig(
-// utils.GetEnvOrDefault("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_FILE", "~/service-account.json"),
-// ),
-// instanceResource["project_id"],
-// instanceResource["region"],
-// instanceResource["name"],
-// backupSchedule,
-// instanceResource["retention_days"],
-// instanceResource["flavor_id"],
-// instanceResource["replicas"],
-// instanceResource["storage_class"],
-// instanceResource["storage_size"],
-// instanceResource["kek_key_id"],
-// instanceResource["kek_key_ring_id"],
-// instanceResource["kek_key_version"],
-// instanceResource["service_account"],
-// instanceResource["acl"],
-// instanceResource["access_scope"],
-// instanceResource["version"],
-//
-// userResource["project_id"],
-// userResource["username"],
-// userResource["role"],
-//
-// databaseResource["project_id"],
-// databaseResource["name"],
-// )
-//}
-//
-// func TestAccPostgresFlexFlexResource(t *testing.T) {
-// resource.ParallelTest(
-// t, resource.TestCase{
-// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
-// CheckDestroy: testAccCheckPostgresFlexDestroy,
-// Steps: []resource.TestStep{
-// // Creation
-// {
-// // testdata/
-// // ConfigDirectory: config.TestNameDirectory(),
-//
-// // testdata//
-// // ConfigDirectory: config.TestStepDirectory(),
-// Config: configResources(instanceResource["backup_schedule"], &testutils.Region),
-// Check: resource.ComposeAggregateTestCheckFunc(
-// // Instance
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "project_id",
-// instanceResource["project_id"],
-// ),
-// resource.TestCheckResourceAttrSet(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "instance_id",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "name",
-// instanceResource["name"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "acl.#",
-// "1",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "acl.0",
-// instanceResource["acl"],
-// ),
-// resource.TestCheckResourceAttrSet(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.id",
-// ),
-// resource.TestCheckResourceAttrSet(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.description",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "backup_schedule",
-// instanceResource["backup_schedule"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.cpu",
-// instanceResource["flavor_cpu"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.ram",
-// instanceResource["flavor_ram"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "replicas",
-// instanceResource["replicas"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "storage.class",
-// instanceResource["storage_class"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "storage.size",
-// instanceResource["storage_size"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "version",
-// instanceResource["version"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "region",
-// testutils.Region,
-// ),
-//
-// // User
-// resource.TestCheckResourceAttrPair(
-// "stackitprivatepreview_postgresflexalpha_user.user", "project_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id",
-// ),
-// resource.TestCheckResourceAttrPair(
-// "stackitprivatepreview_postgresflexalpha_user.user", "instance_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id",
-// ),
-// resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "user_id"),
-// resource.TestCheckResourceAttrSet("stackitprivatepreview_postgresflexalpha_user.user", "password"),
-//
-// // Database
-// resource.TestCheckResourceAttrPair(
-// "stackitprivatepreview_postgresflexalpha_database.database", "project_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id",
-// ),
-// resource.TestCheckResourceAttrPair(
-// "stackitprivatepreview_postgresflexalpha_database.database", "instance_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_database.database",
-// "name",
-// databaseResource["name"],
-// ),
-// resource.TestCheckResourceAttrPair(
-// "stackitprivatepreview_postgresflexalpha_database.database", "owner",
-// "stackitprivatepreview_postgresflexalpha_user.user", "username",
-// ),
-// ),
-// },
-// // data source
-// {
-// Config: fmt.Sprintf(
-// `
-// %s
-//
-// data "stackitprivatepreview_postgresflexalpha_instance" "instance" {
-// project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id
-// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id
-// }
-//
-// data "stackitprivatepreview_postgresflexalpha_user" "user" {
-// project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id
-// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id
-// user_id = stackitprivatepreview_postgresflexalpha_user.user.user_id
-// }
-//
-// data "stackitprivatepreview_postgresflexalpha_database" "database" {
-// project_id = stackitprivatepreview_postgresflexalpha_instance.instance.project_id
-// instance_id = stackitprivatepreview_postgresflexalpha_instance.instance.instance_id
-// database_id = stackitprivatepreview_postgresflexalpha_database.database.database_id
-// }
-// `,
-// configResources(instanceResource["backup_schedule"], nil),
-// ),
-// Check: resource.ComposeAggregateTestCheckFunc(
-// // Instance data
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "project_id",
-// instanceResource["project_id"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "name",
-// instanceResource["name"],
-// ),
-// resource.TestCheckResourceAttrPair(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance", "project_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance", "project_id",
-// ),
-// resource.TestCheckResourceAttrPair(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance", "instance_id",
-// ),
-// resource.TestCheckResourceAttrPair(
-// "data.stackitprivatepreview_postgresflexalpha_user.user", "instance_id",
-// "stackitprivatepreview_postgresflexalpha_user.user", "instance_id",
-// ),
-//
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "acl.#",
-// "1",
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "acl.0",
-// instanceResource["acl"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "backup_schedule",
-// instanceResource["backup_schedule"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.id",
-// instanceResource["flavor_id"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.description",
-// instanceResource["flavor_description"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.cpu",
-// instanceResource["flavor_cpu"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.ram",
-// instanceResource["flavor_ram"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_instance.instance",
-// "replicas",
-// instanceResource["replicas"],
-// ),
-//
-// // User data
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "project_id",
-// userResource["project_id"],
-// ),
-// resource.TestCheckResourceAttrSet(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "user_id",
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "username",
-// userResource["username"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "roles.#",
-// "1",
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "roles.0",
-// userResource["role"],
-// ),
-// resource.TestCheckResourceAttrSet(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "host",
-// ),
-// resource.TestCheckResourceAttrSet(
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "port",
-// ),
-//
-// // Database data
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_database.database",
-// "project_id",
-// instanceResource["project_id"],
-// ),
-// resource.TestCheckResourceAttr(
-// "data.stackitprivatepreview_postgresflexalpha_database.database",
-// "name",
-// databaseResource["name"],
-// ),
-// resource.TestCheckResourceAttrPair(
-// "data.stackitprivatepreview_postgresflexalpha_database.database",
-// "instance_id",
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "instance_id",
-// ),
-// resource.TestCheckResourceAttrPair(
-// "data.stackitprivatepreview_postgresflexalpha_database.database",
-// "owner",
-// "data.stackitprivatepreview_postgresflexalpha_user.user",
-// "username",
-// ),
-// ),
-// },
-// // Import
-// {
-// ResourceName: "stackitprivatepreview_postgresflexalpha_instance.instance",
-// ImportStateIdFunc: func(s *terraform.State) (string, error) {
-// r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_instance.instance"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_instance.instance")
-// }
-// instanceId, ok := r.Primary.Attributes["instance_id"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find attribute instance_id")
-// }
-//
-// return fmt.Sprintf("%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId), nil
-// },
-// ImportState: true,
-// ImportStateVerify: true,
-// ImportStateVerifyIgnore: []string{"password"},
-// },
-// {
-// ResourceName: "stackitprivatepreview_postgresflexalpha_user.user",
-// ImportStateIdFunc: func(s *terraform.State) (string, error) {
-// r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_user.user"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_user.user")
-// }
-// instanceId, ok := r.Primary.Attributes["instance_id"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find attribute instance_id")
-// }
-// userId, ok := r.Primary.Attributes["user_id"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find attribute user_id")
-// }
-//
-// return fmt.Sprintf("%s,%s,%s,%s", testutils.ProjectId, testutils.Region, instanceId, userId), nil
-// },
-// ImportState: true,
-// ImportStateVerify: true,
-// ImportStateVerifyIgnore: []string{"password", "uri"},
-// },
-// {
-// ResourceName: "stackitprivatepreview_postgresflexalpha_database.database",
-// ImportStateIdFunc: func(s *terraform.State) (string, error) {
-// r, ok := s.RootModule().Resources["stackitprivatepreview_postgresflexalpha_database.database"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find resource stackitprivatepreview_postgresflexalpha_database.database")
-// }
-// instanceId, ok := r.Primary.Attributes["instance_id"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find attribute instance_id")
-// }
-// databaseId, ok := r.Primary.Attributes["database_id"]
-// if !ok {
-// return "", fmt.Errorf("couldn't find attribute database_id")
-// }
-//
-// return fmt.Sprintf(
-// "%s,%s,%s,%s",
-// testutils.ProjectId,
-// testutils.Region,
-// instanceId,
-// databaseId,
-// ), nil
-// },
-// ImportState: true,
-// ImportStateVerify: true,
-// },
-// // Update
-// {
-// Config: configResources(instanceResource["backup_schedule_updated"], nil),
-// Check: resource.ComposeAggregateTestCheckFunc(
-// // Instance data
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "project_id",
-// instanceResource["project_id"],
-// ),
-// resource.TestCheckResourceAttrSet(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "instance_id",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "name",
-// instanceResource["name"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "acl.#",
-// "1",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "acl.0",
-// instanceResource["acl"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "backup_schedule",
-// instanceResource["backup_schedule_updated"],
-// ),
-// resource.TestCheckResourceAttrSet(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.id",
-// ),
-// resource.TestCheckResourceAttrSet(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.description",
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.cpu",
-// instanceResource["flavor_cpu"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "flavor.ram",
-// instanceResource["flavor_ram"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "replicas",
-// instanceResource["replicas"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "storage.class",
-// instanceResource["storage_class"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "storage.size",
-// instanceResource["storage_size"],
-// ),
-// resource.TestCheckResourceAttr(
-// "stackitprivatepreview_postgresflexalpha_instance.instance",
-// "version",
-// instanceResource["version"],
-// ),
-// ),
-// },
-// // Deletion is done by the framework implicitly
-// },
-// },
-// )
-//}
-//
-// func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
-// ctx := context.Background()
-// var client *postgresflex.APIClient
-// var err error
-// if testutils.PostgresFlexCustomEndpoint == "" {
-// client, err = postgresflex.NewAPIClient()
-// } else {
-// client, err = postgresflex.NewAPIClient(
-// config.WithEndpoint(testutils.PostgresFlexCustomEndpoint),
-// )
-// }
-// if err != nil {
-// return fmt.Errorf("creating client: %w", err)
-// }
-//
-// instancesToDestroy := []string{}
-// for _, rs := range s.RootModule().Resources {
-// if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" {
-// continue
-// }
-// // instance terraform ID: = "[project_id],[region],[instance_id]"
-// instanceId := strings.Split(rs.Primary.ID, core.Separator)[2]
-// instancesToDestroy = append(instancesToDestroy, instanceId)
-// }
-//
-// instancesResp, err := client.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region).Execute()
-// if err != nil {
-// return fmt.Errorf("getting instancesResp: %w", err)
-// }
-//
-// items := *instancesResp.Instances
-// for i := range items {
-// if items[i].Id == nil {
-// continue
-// }
-// if utils.Contains(instancesToDestroy, *items[i].Id) {
-// // TODO @mhenselin - does force still exist?
-// err := client.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, *items[i].Id)
-// if err != nil {
-// return fmt.Errorf("deleting instance %s during CheckDestroy: %w", *items[i].Id, err)
-// }
-// }
-// }
-// return nil
-//}
-
func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
- testutils.Setup()
-
- pID, ok := os.LookupEnv("TF_ACC_PROJECT_ID")
- if !ok {
- log.Fatalln("unable to read TF_ACC_PROJECT_ID")
- }
-
ctx := context.Background()
- var client *v3alpha1api.APIClient
+ var client *postgresflex.APIClient
var err error
-
- var region, projectID string
- region = testutils.Region
- if region == "" {
- region = "eu01"
- }
-
- projectID = pID
- if projectID == "" {
- return fmt.Errorf("projectID could not be determined in destroy function")
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithServiceAccountKeyPath(os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")),
- config.WithRegion(region),
- }
- if testutils.PostgresFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(testutils.PostgresFlexCustomEndpoint),
+ if testutil.PostgresFlexCustomEndpoint == "" {
+ client, err = postgresflex.NewAPIClient()
+ } else {
+ client, err = postgresflex.NewAPIClient(
+ config.WithEndpoint(testutil.PostgresFlexCustomEndpoint),
)
}
- client, err = v3alpha1api.NewAPIClient(apiClientConfigOptions...)
if err != nil {
- log.Fatalln(err)
+ return fmt.Errorf("creating client: %w", err)
}
instancesToDestroy := []string{}
for _, rs := range s.RootModule().Resources {
- if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" &&
- rs.Type != "stackitprivatepreview_postgresflexbeta_instance" {
+ if rs.Type != "stackit_postgresflex_instance" {
continue
}
-
// instance terraform ID: = "[project_id],[region],[instance_id]"
- instanceID := strings.Split(rs.Primary.ID, core.Separator)[2]
- instancesToDestroy = append(instancesToDestroy, instanceID)
+ instanceId := strings.Split(rs.Primary.ID, core.Separator)[2]
+ instancesToDestroy = append(instancesToDestroy, instanceId)
}
- instancesResp, err := client.DefaultAPI.ListInstancesRequest(ctx, projectID, region).
- Size(100).
- Execute()
+ instancesResp, err := client.ListInstancesRequest(ctx, testutil.ProjectId, testutil.Region).Execute()
if err != nil {
return fmt.Errorf("getting instancesResp: %w", err)
}
- items := instancesResp.GetInstances()
+ items := *instancesResp.Instances
for i := range items {
- if items[i].Id == "" {
+ if items[i].Id == nil {
continue
}
- if utils.Contains(instancesToDestroy, items[i].Id) {
- err := client.DefaultAPI.DeleteInstanceRequest(ctx, testutils.ProjectId, region, items[i].Id).Execute()
+ if utils.Contains(instancesToDestroy, *items[i].Id) {
+ // TODO @mhenselin - does force still exist?
+ err := client.DeleteInstanceRequestExecute(ctx, testutil.ProjectId, testutil.Region, *items[i].Id)
if err != nil {
- return fmt.Errorf("deleting instance %s during CheckDestroy: %w", items[i].Id, err)
- }
- err = postgresflexalpha.DeleteInstanceWaitHandler(
- ctx,
- client.DefaultAPI,
- testutils.ProjectId,
- testutils.Region,
- items[i].Id,
- 15*time.Minute,
- 10*time.Second,
- )
- if err != nil {
- return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", items[i].Id, err)
+ return fmt.Errorf("deleting instance %s during CheckDestroy: %w", *items[i].Id, err)
}
}
}
diff --git a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl b/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl
deleted file mode 100644
index d0ab3f25..00000000
--- a/stackit/internal/services/postgresflexalpha/testdata/instance_template.gompl
+++ /dev/null
@@ -1,54 +0,0 @@
-provider "stackitprivatepreview" {
- default_region = "{{ .Region }}"
- service_account_key_path = "{{ .ServiceAccountFilePath }}"
-}
-
-resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
- project_id = "{{ .ProjectID }}"
- name = "{{ .Name }}"
- backup_schedule = "{{ .BackupSchedule }}"
- retention_days = {{ .RetentionDays }}
- flavor_id = "{{ .FlavorID }}"
- replicas = {{ .Replicas }}
- storage = {
- performance_class = "{{ .PerformanceClass }}"
- size = {{ .Size }}
- }
-{{ if .UseEncryption }}
- encryption = {
- kek_key_id = "{{ .KekKeyID }}"
- kek_key_ring_id = "{{ .KekKeyRingID }}"
- kek_key_version = {{ .KekKeyVersion }}
- service_account = "{{ .KekServiceAccount }}"
- }
-{{ end }}
- network = {
- acl = ["{{ .ACLString }}"]
- access_scope = "{{ .AccessScope }}"
- }
- version = {{ .Version }}
-}
-
-{{ if .Users }}
-{{ $tfName := .TfName }}
-{{ range $user := .Users }}
-resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
- project_id = "{{ $user.ProjectID }}"
- instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
- name = "{{ $user.Name }}"
- roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
-}
-{{ end }}
-{{ end }}
-
-{{ if .Databases }}
-{{ $tfName := .TfName }}
-{{ range $db := .Databases }}
-resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
- project_id = "{{ $db.ProjectID }}"
- instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
- name = "{{ $db.Name }}"
- owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name
-}
-{{ end }}
-{{ end }}
diff --git a/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf b/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/testdata/resource-complete.tf
@@ -0,0 +1 @@
+
diff --git a/stackit/internal/services/postgresflexalpha/user/datasource.go b/stackit/internal/services/postgresflexalpha/user/datasource.go
index 77deaa46..70d05aba 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasource.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasource.go
@@ -5,21 +5,22 @@ import (
"fmt"
"math"
"net/http"
+ "strconv"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
)
// Ensure the implementation satisfies the expected interfaces.
@@ -27,20 +28,28 @@ var (
_ datasource.DataSource = &userDataSource{}
)
+type DataSourceModel struct {
+ Id types.String `tfsdk:"id"` // needed by TF
+ UserId types.Int64 `tfsdk:"user_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Username types.String `tfsdk:"username"`
+ Roles types.Set `tfsdk:"roles"`
+ Host types.String `tfsdk:"host"`
+ Port types.Int64 `tfsdk:"port"`
+ Region types.String `tfsdk:"region"`
+ Status types.String `tfsdk:"status"`
+ ConnectionString types.String `tfsdk:"connection_string"`
+}
+
// NewUserDataSource is a helper function to simplify the provider implementation.
func NewUserDataSource() datasource.DataSource {
return &userDataSource{}
}
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- postgresflexalpha.UserModel
- TerraformID types.String `tfsdk:"id"`
-}
-
// userDataSource is the data source implementation.
type userDataSource struct {
- client *v3alpha1api.APIClient
+ client *postgresflex.APIClient
providerData core.ProviderData
}
@@ -74,16 +83,84 @@ func (r *userDataSource) Configure(
}
// Schema defines the schema for the data source.
-func (r *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- s := postgresflexalpha.UserDataSourceSchema(ctx)
- s.Attributes["id"] = schema.StringAttribute{
- Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
- "`user_id`\\\".\",",
- Optional: true,
- Computed: true,
+func (r *userDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "Postgres Flex user data source schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal data source. ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
+ "user_id": "User ID.",
+ "instance_id": "ID of the PostgresFlex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "username": "The name of the user.",
+ "roles": "The roles assigned to the user.",
+ "host": "The host address for the user to connect to the instance.",
+ "port": "The port number for the user to connect to the instance.",
+ "region": "The resource region. If not defined, the provider region is used.",
+ "status": "The current status of the user.",
+ "connection_string": "The connection string for the user to the instance.",
}
- resp.Schema = s
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ },
+ "user_id": schema.StringAttribute{
+ Description: descriptions["user_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.NoSeparator(),
+ },
+ },
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "username": schema.StringAttribute{
+ Description: descriptions["username"],
+ Computed: true,
+ },
+ "roles": schema.SetAttribute{
+ Description: descriptions["roles"],
+ ElementType: types.StringType,
+ Computed: true,
+ },
+ "host": schema.StringAttribute{
+ Description: descriptions["host"],
+ Computed: true,
+ },
+ "port": schema.Int64Attribute{
+ Description: descriptions["port"],
+ Computed: true,
+ },
+ "region": schema.StringAttribute{
+ // the region cannot be found automatically, so it has to be passed
+ Optional: true,
+ Description: descriptions["region"],
+ },
+ "status": schema.StringAttribute{
+ Description: descriptions["status"],
+ Computed: true,
+ },
+ "connection_string": schema.StringAttribute{
+ Description: descriptions["connection_string"],
+ Computed: true,
+ },
+ },
+ }
}
// Read refreshes the Terraform state with the latest data.
@@ -92,7 +169,7 @@ func (r *userDataSource) Read(
req datasource.ReadRequest,
resp *datasource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model dataSourceModel
+ var model DataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -101,24 +178,38 @@ func (r *userDataSource) Read(
ctx = core.InitProviderContext(ctx)
- projectID := model.ProjectId.ValueString()
- instanceID := model.InstanceId.ValueString()
- userID64 := model.UserId.ValueInt64()
- if userID64 > math.MaxInt32 {
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
+ userId64 := model.UserId.ValueInt64()
+ if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
}
- userID := int32(userID64) // nolint:gosec // check is performed above
+ userId := int32(userId64)
region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectID)
- ctx = tflog.SetField(ctx, "instance_id", instanceID)
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
+ ctx = tflog.SetField(ctx, "user_id", userId)
ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "user_id", userID)
- recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectID, region, instanceID, userID).Execute()
+ recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
- handleReadError(ctx, &diags, err, projectID, instanceID, userID)
+ utils.LogError(
+ ctx,
+ &resp.Diagnostics,
+ err,
+ "Reading user",
+ fmt.Sprintf(
+ "User with ID %q or instance with ID %q does not exist in project %q.",
+ userId,
+ instanceId,
+ projectId,
+ ),
+ map[int]string{
+ http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
+ },
+ )
resp.State.RemoveResource(ctx)
return
}
@@ -146,38 +237,47 @@ func (r *userDataSource) Read(
tflog.Info(ctx, "Postgres Flex user read")
}
-// handleReadError centralizes API error handling for the Read operation.
-func handleReadError(
- ctx context.Context,
- diags *diag.Diagnostics,
- err error,
- projectID, instanceID string,
- userID int32,
-) {
- utils.LogError(
- ctx,
- diags,
- err,
- "Reading user",
- fmt.Sprintf(
- "User with ID %q or instance with ID %q does not exist in project %q.",
- userID,
- instanceID,
- projectID,
- ),
- map[int]string{
- http.StatusBadRequest: fmt.Sprintf(
- "Invalid user request parameters for project %q and instance %q.",
- projectID,
- instanceID,
- ),
- http.StatusNotFound: fmt.Sprintf(
- "User, instance %q, or project %q or user %q not found.",
- instanceID,
- projectID,
- userID,
- ),
- http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectID),
- },
+func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *DataSourceModel, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ var userId int64
+ if model.UserId.ValueInt64() != 0 {
+ userId = model.UserId.ValueInt64()
+ } else if user.Id != nil {
+ userId = *user.Id
+ } else {
+ return fmt.Errorf("user id not present")
+ }
+
+ model.Id = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
)
+ model.UserId = types.Int64Value(userId)
+ model.Username = types.StringPointerValue(user.Name)
+
+ if user.Roles == nil {
+ model.Roles = types.SetNull(types.StringType)
+ } else {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
+ }
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = rolesSet
+ }
+ model.Host = types.StringPointerValue(user.Host)
+ model.Port = types.Int64PointerValue(user.Port)
+ model.Region = types.StringValue(region)
+ model.Status = types.StringPointerValue(user.Status)
+ model.ConnectionString = types.StringPointerValue(user.ConnectionString)
+ return nil
}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasource_test.go b/stackit/internal/services/postgresflexalpha/user/datasource_test.go
new file mode 100644
index 00000000..679bef85
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/user/datasource_test.go
@@ -0,0 +1,146 @@
+package postgresflexalpha
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+)
+
+func TestMapDataSourceFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflexalpha.GetUserResponse
+ region string
+ expected DataSourceModel
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ DataSourceModel{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflexalpha.GetUserResponse{
+ Roles: &[]postgresflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Name: utils.Ptr("username"),
+ Host: utils.Ptr("host"),
+ Port: utils.Ptr(int64(1234)),
+ },
+ testRegion,
+ DataSourceModel{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringValue("username"),
+ Roles: types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ Host: types.StringValue("host"),
+ Port: types.Int64Value(1234),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Roles: &[]postgresflexalpha.UserRole{},
+ Name: nil,
+ Host: nil,
+ Port: utils.Ptr(int64(2123456789)),
+ Status: utils.Ptr("status"),
+ ConnectionString: utils.Ptr("connection_string"),
+ },
+ testRegion,
+ DataSourceModel{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetValueMust(types.StringType, []attr.Value{}),
+ Host: types.StringNull(),
+ Port: types.Int64Value(2123456789),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue("status"),
+ ConnectionString: types.StringValue("connection_string"),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ DataSourceModel{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ DataSourceModel{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ DataSourceModel{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &DataSourceModel{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ UserId: tt.expected.UserId,
+ }
+ err := mapDataSourceFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
index 29a7cca0..fb2a7644 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/user_data_source_gen.go
@@ -14,7 +14,17 @@ import (
func UserDataSourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
- "tf_original_api_id": schema.Int64Attribute{
+ "connection_string": schema.StringAttribute{
+ Computed: true,
+ Description: "The connection string for the user to connect to the instance.",
+ MarkdownDescription: "The connection string for the user to connect to the instance.",
+ },
+ "host": schema.StringAttribute{
+ Computed: true,
+ Description: "The host of the instance to which the user belongs.",
+ MarkdownDescription: "The host of the instance to which the user belongs.",
+ },
+ "id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
@@ -29,6 +39,11 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The name of the user.",
MarkdownDescription: "The name of the user.",
},
+ "port": schema.Int64Attribute{
+ Computed: true,
+ Description: "The port of the instance to which the user belongs.",
+ MarkdownDescription: "The port of the instance to which the user belongs.",
+ },
"project_id": schema.StringAttribute{
Required: true,
Description: "The STACKIT project ID.",
@@ -65,12 +80,15 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
- Id types.Int64 `tfsdk:"tf_original_api_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
+ ConnectionString types.String `tfsdk:"connection_string"`
+ Host types.String `tfsdk:"host"`
+ Id types.Int64 `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ Port types.Int64 `tfsdk:"port"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Roles types.List `tfsdk:"roles"`
+ Status types.String `tfsdk:"status"`
+ UserId types.Int64 `tfsdk:"user_id"`
}
diff --git a/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go b/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go
index bc83be6b..b54a5dd6 100644
--- a/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/datasources_gen/users_data_source_gen.go
@@ -86,6 +86,8 @@ func UsersDataSourceSchema(ctx context.Context) schema.Schema {
stringvalidator.OneOf(
"id.asc",
"id.desc",
+ "index.desc",
+ "index.asc",
"name.desc",
"name.asc",
"status.desc",
diff --git a/stackit/internal/services/postgresflexalpha/user/mapper.go b/stackit/internal/services/postgresflexalpha/user/mapper.go
deleted file mode 100644
index dcf4545c..00000000
--- a/stackit/internal/services/postgresflexalpha/user/mapper.go
+++ /dev/null
@@ -1,139 +0,0 @@
-package postgresflexalpha
-
-import (
- "fmt"
- "strconv"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapDataSourceFields maps API response to data source model, preserving existing ID.
-func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- var userID int64
- if model.UserId.ValueInt64() == 0 {
- return fmt.Errorf("user id not present")
- }
- userID = model.UserId.ValueInt64()
-
- model.TerraformID = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userID, 10),
- )
-
- model.UserId = types.Int64Value(userID)
- model.Name = types.StringValue(user.GetName())
-
- if user.Roles == nil {
- model.Roles = types.List(types.SetNull(types.StringType))
- } else {
- var roles []attr.Value
- for _, role := range user.Roles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- model.Id = types.Int64Value(userID)
- model.Region = types.StringValue(region)
- model.Status = types.StringValue(user.GetStatus())
- return nil
-}
-
-// toPayloadRoles converts a string slice to the API's role type.
-func toPayloadRoles(roles []string) []v3alpha1api.UserRole {
- var userRoles = make([]v3alpha1api.UserRole, 0, len(roles))
- for _, role := range roles {
- userRoles = append(userRoles, v3alpha1api.UserRole(role))
- }
- return userRoles
-}
-
-// toUpdatePayload creates an API update payload from the resource model.
-func toUpdatePayload(model *resourceModel, roles []string) (
- *v3alpha1api.UpdateUserRequestPayload,
- error,
-) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
- if roles == nil {
- return nil, fmt.Errorf("nil roles")
- }
-
- return &v3alpha1api.UpdateUserRequestPayload{
- Name: model.Name.ValueStringPointer(),
- Roles: toPayloadRoles(roles),
- }, nil
-}
-
-// toCreatePayload creates an API create payload from the resource model.
-func toCreatePayload(model *resourceModel, roles []string) (*v3alpha1api.CreateUserRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
- if roles == nil {
- return nil, fmt.Errorf("nil roles")
- }
-
- return &v3alpha1api.CreateUserRequestPayload{
- Roles: toPayloadRoles(roles),
- Name: model.Name.ValueString(),
- }, nil
-}
-
-// mapResourceFields maps API response to the resource model, preserving existing ID.
-func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- var userID int64
- if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
- userID = model.UserId.ValueInt64()
- } else if user.Id != 0 {
- userID = int64(user.Id)
- } else {
- return fmt.Errorf("user id not present")
- }
-
- model.Id = types.Int64Value(userID)
- model.UserId = types.Int64Value(userID)
- model.Name = types.StringValue(user.Name)
-
- if user.Roles == nil {
- model.Roles = types.List(types.SetNull(types.StringType))
- } else {
- var roles []attr.Value
- for _, role := range user.Roles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
- model.Region = types.StringValue(region)
- model.Status = types.StringValue(user.Status)
- return nil
-}
diff --git a/stackit/internal/services/postgresflexalpha/user/mapper_test.go b/stackit/internal/services/postgresflexalpha/user/mapper_test.go
deleted file mode 100644
index 5b07ede8..00000000
--- a/stackit/internal/services/postgresflexalpha/user/mapper_test.go
+++ /dev/null
@@ -1,573 +0,0 @@
-package postgresflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
- data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
-)
-
-func TestMapDataSourceFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflex.GetUserResponse
- region string
- expected dataSourceModel
- isValid bool
- }{
- {
- "default_values",
- &postgresflex.GetUserResponse{},
- testRegion,
- dataSourceModel{
- UserModel: data.UserModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Status: types.StringValue(""),
- Region: types.StringValue(testRegion),
- },
- TerraformID: types.StringValue("pid,region,iid,1"),
- },
- true,
- },
- {
- "simple_values",
- &postgresflex.GetUserResponse{
- Roles: []postgresflex.UserRole{
- "role_1",
- "role_2",
- "",
- },
- Name: "username",
- },
- testRegion,
- dataSourceModel{
- UserModel: data.UserModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- types.StringValue(""),
- },
- ),
- ),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- },
- TerraformID: types.StringValue("pid,region,iid,1"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- Roles: []postgresflex.UserRole{},
- Name: "",
- Status: "status",
- },
- testRegion,
- dataSourceModel{
- UserModel: data.UserModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- },
- TerraformID: types.StringValue("pid,region,iid,1"),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "nil_response_2",
- &postgresflex.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "no_resource_id",
- &postgresflex.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &dataSourceModel{
- UserModel: data.UserModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- },
- }
- err := mapDataSourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(state, &tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFieldsCreate(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflex.GetUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringNull(),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- //ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "simple_values",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- Name: "username",
- Status: "status",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue("username"),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringNull(),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- //ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- Name: "",
- Status: "",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringNull(),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- //ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &postgresflex.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &postgresflex.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
-
- err := mapResourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *postgresflex.GetUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(int64(1)),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- //ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "simple_values",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- Roles: []postgresflex.UserRole{
- "role_1",
- "role_2",
- "",
- },
- Name: "username",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- types.StringValue(""),
- },
- ),
- ),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- //ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &postgresflex.GetUserResponse{
- Id: int32(1),
- Name: "",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Name: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- //ConnectionString: types.StringNull(),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &postgresflex.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &postgresflex.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
- err := mapResourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(state, &tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *resourceModel
- inputRoles []string
- expected *postgresflex.CreateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &resourceModel{},
- []string{},
- &postgresflex.CreateUserRequestPayload{
- Name: "",
- Roles: []postgresflex.UserRole{},
- },
- true,
- },
- {
- "simple_values",
- &resourceModel{
- Name: types.StringValue("username"),
- },
- []string{
- "role_1",
- "role_2",
- },
- &postgresflex.CreateUserRequestPayload{
- Name: "username",
- Roles: []postgresflex.UserRole{
- "role_1",
- "role_2",
- },
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &resourceModel{
- Name: types.StringNull(),
- },
- []string{
- "",
- },
- &postgresflex.CreateUserRequestPayload{
- Roles: []postgresflex.UserRole{
- "",
- },
- Name: "",
- },
- true,
- },
- {
- "nil_model",
- nil,
- []string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &resourceModel{},
- nil,
- nil,
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToUpdatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *resourceModel
- inputRoles []string
- expected *postgresflex.UpdateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &resourceModel{},
- []string{},
- &postgresflex.UpdateUserRequestPayload{
- Roles: []postgresflex.UserRole{},
- },
- true,
- },
- {
- "default_values",
- &resourceModel{
- Name: types.StringValue("username"),
- },
- []string{
- "role_1",
- "role_2",
- },
- &postgresflex.UpdateUserRequestPayload{
- Name: utils.Ptr("username"),
- Roles: []postgresflex.UserRole{
- "role_1",
- "role_2",
- },
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &resourceModel{
- Name: types.StringNull(),
- },
- []string{
- "",
- },
- &postgresflex.UpdateUserRequestPayload{
- Roles: []postgresflex.UserRole{
- "",
- },
- },
- true,
- },
- {
- "nil_model",
- nil,
- []string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &resourceModel{},
- nil,
- nil,
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toUpdatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml b/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml
deleted file mode 100644
index e0822704..00000000
--- a/stackit/internal/services/postgresflexalpha/user/planModifiers.yaml
+++ /dev/null
@@ -1,64 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'user_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'roles'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'password'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'host'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'port'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
- - 'RequiresReplace'
-
- - name: 'status'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'connection_string'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/postgresflexalpha/user/resource.go b/stackit/internal/services/postgresflexalpha/user/resource.go
index 065c9552..4df9577d 100644
--- a/stackit/internal/services/postgresflexalpha/user/resource.go
+++ b/stackit/internal/services/postgresflexalpha/user/resource.go
@@ -2,65 +2,69 @@ package postgresflexalpha
import (
"context"
- _ "embed"
+ "errors"
"fmt"
"math"
- "slices"
+ "net/http"
"strconv"
"strings"
- "time"
"github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
-
- postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
- postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
+
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
)
+// Ensure the implementation satisfies the expected interfaces.
var (
- // Ensure the implementation satisfies the expected interfaces.
- _ resource.Resource = &userResource{}
- _ resource.ResourceWithConfigure = &userResource{}
- _ resource.ResourceWithImportState = &userResource{}
- _ resource.ResourceWithModifyPlan = &userResource{}
- _ resource.ResourceWithIdentity = &userResource{}
- _ resource.ResourceWithValidateConfig = &userResource{}
-
- // Error message constants
- extractErrorSummary = "extracting failed"
- extractErrorMessage = "Extracting identity data: %v"
+ _ resource.Resource = &userResource{}
+ _ resource.ResourceWithConfigure = &userResource{}
+ _ resource.ResourceWithImportState = &userResource{}
+ _ resource.ResourceWithModifyPlan = &userResource{}
)
+type Model struct {
+ Id types.String `tfsdk:"id"` // needed by TF
+ UserId types.Int64 `tfsdk:"user_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Username types.String `tfsdk:"username"`
+ Roles types.Set `tfsdk:"roles"`
+ Password types.String `tfsdk:"password"`
+ Host types.String `tfsdk:"host"`
+ Port types.Int64 `tfsdk:"port"`
+ Region types.String `tfsdk:"region"`
+ Status types.String `tfsdk:"status"`
+ ConnectionString types.String `tfsdk:"connection_string"`
+}
+
// NewUserResource is a helper function to simplify the provider implementation.
func NewUserResource() resource.Resource {
return &userResource{}
}
-// resourceModel represents the Terraform resource state for a PostgreSQL Flex user.
-type resourceModel = postgresflexalpha.UserModel
-
-// UserResourceIdentityModel describes the resource's identity attributes.
-type UserResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- UserID types.Int64 `tfsdk:"user_id"`
-}
-
-// userResource implements the resource handling for a PostgreSQL Flex user.
+// userResource is the resource implementation.
type userResource struct {
- client *v3alpha1api.APIClient
+ client *postgresflex.APIClient
providerData core.ProviderData
}
@@ -71,7 +75,7 @@ func (r *userResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
+ var configModel Model
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -81,7 +85,7 @@ func (r *userResource) ModifyPlan(
return
}
- var planModel resourceModel
+ var planModel Model
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -119,57 +123,116 @@ func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequ
tflog.Info(ctx, "Postgres Flex user client configured")
}
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
// Schema defines the schema for the resource.
-func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := postgresflexalpha.UserResourceSchema(ctx)
+func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ rolesOptions := []string{"login", "createdb", "createrole"}
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
+ descriptions := map[string]string{
+ "main": "Postgres Flex user resource schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
+ "user_id": "User ID.",
+ "instance_id": "ID of the PostgresFlex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "username": "The name of the user.",
+ "roles": "Database access levels for the user. " + utils.FormatPossibleValues(rolesOptions...),
+ "region": "The resource region. If not defined, the provider region is used.",
+ "status": "The current status of the user.",
+ "password": "The password for the user. This is only set upon creation.",
+ "host": "The host of the Postgres Flex instance.",
+ "port": "The port of the Postgres Flex instance.",
+ "connection_string": "The connection string for the user to the instance.",
}
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *userResource) ValidateConfig(
- ctx context.Context,
- req resource.ValidateConfigRequest,
- resp *resource.ValidateConfigResponse,
-) {
- var data resourceModel
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var roles []string
- diags := data.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return
- }
-
- var resRoles []string
- for _, role := range roles {
- if slices.Contains(resRoles, role) {
- resp.Diagnostics.AddAttributeError(
- path.Root("roles"),
- "Attribute Configuration Error",
- "defined roles MUST NOT contain duplicates",
- )
- return
- }
- resRoles = append(resRoles, role)
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "user_id": schema.Int64Attribute{
+ Description: descriptions["user_id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.Int64{},
+ },
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "username": schema.StringAttribute{
+ Description: descriptions["username"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ // stringplanmodifier.RequiresReplace(),
+ },
+ },
+ "roles": schema.SetAttribute{
+ Description: descriptions["roles"],
+ ElementType: types.StringType,
+ Required: true,
+ Validators: []validator.Set{
+ setvalidator.ValueStringsAre(
+ stringvalidator.OneOf(rolesOptions...),
+ ),
+ },
+ },
+ "password": schema.StringAttribute{
+ Description: descriptions["password"],
+ Computed: true,
+ Sensitive: true,
+ },
+ "host": schema.StringAttribute{
+ Description: descriptions["host"],
+ Computed: true,
+ },
+ "port": schema.Int64Attribute{
+ Description: descriptions["port"],
+ Computed: true,
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ // must be computed to allow for storing the override value from the provider
+ Computed: true,
+ Description: descriptions["region"],
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ },
+ "status": schema.StringAttribute{
+ Description: descriptions["status"],
+ Computed: true,
+ },
+ "connection_string": schema.StringAttribute{
+ Description: descriptions["connection_string"],
+ Computed: true,
+ },
+ },
}
}
@@ -179,7 +242,7 @@ func (r *userResource) Create(
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -187,14 +250,8 @@ func (r *userResource) Create(
}
ctx = core.InitProviderContext(ctx)
-
- arg := &clientArg{
- projectID: model.ProjectId.ValueString(),
- instanceID: model.InstanceId.ValueString(),
- region: r.providerData.GetRegionWithOverride(model.Region),
- }
-
- ctx = r.setTFLogFields(ctx, arg)
+ ctx = r.setTFLogFields(ctx, &model)
+ arg := r.getClientArg(&model)
var roles = r.expandRoles(ctx, model.Roles, &resp.Diagnostics)
if resp.Diagnostics.HasError() {
@@ -202,26 +259,27 @@ func (r *userResource) Create(
}
// Generate API request body from model
- payload, err := toCreatePayload(&model, roles)
+ payload, err := toCreatePayload(&model, &roles)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
return
}
-
// Create new user
- userResp, err := r.client.DefaultAPI.CreateUserRequest(
+ userResp, err := r.client.CreateUserRequest(
ctx,
- arg.projectID,
+ arg.projectId,
arg.region,
- arg.instanceID,
+ arg.instanceId,
).CreateUserRequestPayload(*payload).Execute()
+
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
return
}
- id, ok := userResp.GetIdOk()
- if !ok || *id == 0 {
+ ctx = core.LogResponse(ctx)
+
+ if userResp.Id == nil || *userResp.Id == 0 {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -230,75 +288,25 @@ func (r *userResource) Create(
)
return
}
- arg.userID = int64(*id)
+ model.UserId = types.Int64PointerValue(userResp.Id)
+ model.Password = types.StringPointerValue(userResp.Password)
- ctx = tflog.SetField(ctx, "user_id", id)
+ ctx = tflog.SetField(ctx, "user_id", *userResp.Id)
- ctx = core.LogResponse(ctx)
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(arg.projectID),
- Region: types.StringValue(arg.region),
- InstanceID: types.StringValue(arg.instanceID),
- UserID: types.Int64Value(int64(*id)),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- model.Id = types.Int64Value(int64(*id))
- model.UserId = types.Int64Value(int64(*id))
- model.Password = types.StringValue(userResp.GetPassword())
- model.Status = types.StringValue(userResp.GetStatus())
-
- waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
- ctx,
- r.client.DefaultAPI,
- arg.projectID,
- arg.instanceID,
- arg.region,
- int64(*id),
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
+ exists, err := r.getUserResource(ctx, &model)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
return
}
- if waitResp.Id == 0 {
+ if !exists {
core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- "Instance creation waiting: returned id is nil",
+ ctx, &resp.Diagnostics, "Error creating user",
+ fmt.Sprintf("User ID '%v' resource not found after creation", model.UserId.ValueInt64()),
)
return
}
- if waitResp.Id != *id {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- fmt.Sprintf(
- "Instance creation waiting: returned id is wrong: %+v - %+v",
- waitResp.Id,
- id,
- ),
- )
- return
- }
-
// Set state to fully populated data
diags = resp.State.Set(ctx, model)
resp.Diagnostics.Append(diags...)
@@ -314,7 +322,7 @@ func (r *userResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -323,65 +331,20 @@ func (r *userResource) Read(
ctx = core.InitProviderContext(ctx)
- arg := &clientArg{
- projectID: model.ProjectId.ValueString(),
- instanceID: model.InstanceId.ValueString(),
- region: r.providerData.GetRegionWithOverride(model.Region),
- }
-
- ctx = r.setTFLogFields(ctx, arg)
-
- ctx = core.InitProviderContext(ctx)
-
- // Read resource state
- waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
- ctx,
- r.client.DefaultAPI,
- arg.projectID,
- arg.instanceID,
- arg.region,
- model.UserId.ValueInt64(),
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
+ exists, err := r.getUserResource(ctx, &model)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "read user",
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err))
return
}
- if int64(waitResp.Id) != model.UserId.ValueInt64() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "read user",
- "Instance creation waiting: returned id is nil or wrong",
- )
+ if !exists {
+ resp.State.RemoveResource(ctx)
return
}
- arg.userID = int64(waitResp.Id)
ctx = core.LogResponse(ctx)
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(arg.projectID),
- Region: types.StringValue(arg.region),
- InstanceID: types.StringValue(arg.instanceID),
- UserID: types.Int64Value(arg.userID),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
// Set refreshed state
diags = resp.State.Set(ctx, model)
resp.Diagnostics.Append(diags...)
@@ -397,7 +360,7 @@ func (r *userResource) Update(
req resource.UpdateRequest,
resp *resource.UpdateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -405,18 +368,11 @@ func (r *userResource) Update(
}
ctx = core.InitProviderContext(ctx)
-
- arg := &clientArg{
- projectID: model.ProjectId.ValueString(),
- instanceID: model.InstanceId.ValueString(),
- region: r.providerData.GetRegionWithOverride(model.Region),
- }
-
- ctx = r.setTFLogFields(ctx, arg)
- ctx = core.InitProviderContext(ctx)
+ ctx = r.setTFLogFields(ctx, &model)
+ arg := r.getClientArg(&model)
// Retrieve values from state
- var stateModel resourceModel
+ var stateModel Model
diags = req.State.Get(ctx, &stateModel)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -429,26 +385,26 @@ func (r *userResource) Update(
}
// Generate API request body from model
- payload, err := toUpdatePayload(&model, roles)
+ payload, err := toUpdatePayload(&model, &roles)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err))
return
}
- userID64 := arg.userID
- if userID64 > math.MaxInt32 {
+ userId64 := arg.userId
+ if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
}
- userID := int32(userID64) // nolint:gosec // check is performed above
+ userId := int32(userId64)
// Update existing instance
- err = r.client.DefaultAPI.UpdateUserRequest(
+ err = r.client.UpdateUserRequest(
ctx,
- arg.projectID,
+ arg.projectId,
arg.region,
- arg.instanceID,
- userID,
+ arg.instanceId,
+ userId,
).UpdateUserRequestPayload(*payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", err.Error())
@@ -457,52 +413,20 @@ func (r *userResource) Update(
ctx = core.LogResponse(ctx)
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(arg.projectID),
- Region: types.StringValue(arg.region),
- InstanceID: types.StringValue(arg.instanceID),
- UserID: types.Int64Value(userID64),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Verify update
- waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
- ctx,
- r.client.DefaultAPI,
- arg.projectID,
- arg.instanceID,
- arg.region,
- model.UserId.ValueInt64(),
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
+ exists, err := r.getUserResource(ctx, &stateModel)
if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "read user",
- fmt.Sprintf("user update waiting: %v", err),
- )
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Calling API: %v", err))
return
}
- if int64(waitResp.Id) != model.UserId.ValueInt64() {
+ if !exists {
core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "update user",
- "User creation waiting: returned id is nil or wrong",
+ ctx, &resp.Diagnostics, "Error updating user",
+ fmt.Sprintf("User ID '%v' resource not found after update", stateModel.UserId.ValueInt64()),
)
return
}
- arg.userID = int64(waitResp.Id)
// Set state to fully populated data
diags = resp.State.Set(ctx, stateModel)
@@ -519,232 +443,231 @@ func (r *userResource) Delete(
req resource.DeleteRequest,
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
- // Read identity data
- var identityData UserResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
ctx = core.InitProviderContext(ctx)
+ ctx = r.setTFLogFields(ctx, &model)
+ arg := r.getClientArg(&model)
- arg, errExt := r.extractIdentityData(model, identityData)
- if errExt != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- extractErrorSummary,
- fmt.Sprintf(extractErrorMessage, errExt),
- )
- }
-
- ctx = r.setTFLogFields(ctx, arg)
- ctx = core.InitProviderContext(ctx)
-
- userID64 := arg.userID
- if userID64 > math.MaxInt32 {
+ userId64 := arg.userId
+ if userId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
return
}
- userID := int32(userID64) // nolint:gosec // check is performed above
+ userId := int32(userId64)
// Delete existing record set
- err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectID, arg.region, arg.instanceID, userID).Execute()
+ err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
}
ctx = core.LogResponse(ctx)
- // TODO: Verify deletion
- // exists, err := r.getUserResource(ctx, &model, arg)
- // if err != nil {
- // core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
- // return
- //}
- // if exists {
- // core.LogAndAddError(
- // ctx, &resp.Diagnostics, "Error deleting user",
- // fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt32()),
- // )
- // return
- //}
+ exists, err := r.getUserResource(ctx, &model)
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
+ return
+ }
+ if exists {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics, "Error deleting user",
+ fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()),
+ )
+ return
+ }
resp.State.RemoveResource(ctx)
tflog.Info(ctx, "Postgres Flex user deleted")
}
-// IdentitySchema defines the fields that are required to uniquely identify a resource.
-func (r *userResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- response *resource.IdentitySchemaResponse,
-) {
- response.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true,
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true,
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true,
- },
- "user_id": identityschema.Int64Attribute{
- RequiredForImport: true,
- },
- },
- }
-}
-
-// clientArg holds the arguments for API calls.
-type clientArg struct {
- projectID string
- instanceID string
- region string
- userID int64
-}
-
// ImportState imports a resource into the Terraform state on success.
-// The expected import identifier format is: [project_id],[region],[instance_id],[database_id]
+// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
func (r *userResource) ImportState(
ctx context.Context,
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
- return
- }
-
- userID, err := strconv.ParseInt(idParts[3], 10, 64)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...)
-
- tflog.Info(ctx, "Postgres Flex user state imported")
-
+ idParts := strings.Split(req.ID, core.Separator)
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
+ req.ID,
+ ),
+ )
return
}
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData UserResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectID := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceID := identityData.InstanceID.ValueString()
- userID := identityData.UserID.ValueInt64()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceID)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...)
-
- tflog.Info(ctx, "Postgres Flex user state imported")
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), idParts[3])...)
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "postgresflexalpha user imported with empty password and empty uri",
+ "The user password and uri are not imported as they are only available upon creation of a new user. The password and uri fields will be empty.",
+ )
+ tflog.Info(ctx, "postgresflexalpha user state imported")
}
-// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
-func (r *userResource) extractIdentityData(
- model resourceModel,
- identity UserResourceIdentityModel,
-) (*clientArg, error) {
- var projectID, region, instanceID string
- var userID int64
- if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
- userID = model.UserId.ValueInt64()
+func mapFields(userResp *postgresflex.GetUserResponse, model *Model, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ var userId int64
+ if model.UserId.ValueInt64() != 0 {
+ userId = model.UserId.ValueInt64()
+ } else if user.Id != nil {
+ userId = *user.Id
} else {
- if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
- return nil, fmt.Errorf("user_id not found in config")
+ return fmt.Errorf("user id not present")
+ }
+ model.Id = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
+ )
+ model.UserId = types.Int64Value(userId)
+ model.Username = types.StringPointerValue(user.Name)
+
+ if user.Roles == nil {
+ model.Roles = types.SetNull(types.StringType)
+ } else {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
}
- userID = identity.UserID.ValueInt64()
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = rolesSet
+ }
+ model.Host = types.StringPointerValue(user.Host)
+ model.Port = types.Int64PointerValue(user.Port)
+ model.Region = types.StringValue(region)
+ model.Status = types.StringPointerValue(user.Status)
+ model.ConnectionString = types.StringPointerValue(user.ConnectionString)
+ return nil
+}
+
+// getUserResource refreshes the resource state by calling the API and mapping the response to the model.
+// Returns true if the resource state was successfully refreshed, false if the resource does not exist.
+func (r *userResource) getUserResource(ctx context.Context, model *Model) (bool, error) {
+ ctx = r.setTFLogFields(ctx, model)
+ arg := r.getClientArg(model)
+
+ userId64 := arg.userId
+ if userId64 > math.MaxInt32 {
+ return false, errors.New("error in type conversion: int value too large (userId)")
+ }
+ userId := int32(userId64)
+
+ // API Call
+ userResp, err := r.client.GetUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
+
+ if err != nil {
+ var oapiErr *oapierror.GenericOpenAPIError
+ if errors.As(err, &oapiErr) && oapiErr.StatusCode == http.StatusNotFound {
+ return false, nil
+ }
+
+ return false, fmt.Errorf("error fetching user resource: %w", err)
}
- if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
- projectID = model.ProjectId.ValueString()
- } else {
- if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
- return nil, fmt.Errorf("project_id not found in config")
- }
- projectID = identity.ProjectID.ValueString()
+ if err := mapFields(userResp, model, arg.region); err != nil {
+ return false, fmt.Errorf("error mapping user resource: %w", err)
}
- if !model.Region.IsNull() && !model.Region.IsUnknown() {
- region = r.providerData.GetRegionWithOverride(model.Region)
- } else {
- if identity.Region.IsNull() || identity.Region.IsUnknown() {
- return nil, fmt.Errorf("region not found in config")
- }
- region = r.providerData.GetRegionWithOverride(identity.Region)
- }
+ return true, nil
+}
- if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
- instanceID = model.InstanceId.ValueString()
- } else {
- if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
- return nil, fmt.Errorf("instance_id not found in config")
- }
- instanceID = identity.InstanceID.ValueString()
- }
+type clientArg struct {
+ projectId string
+ instanceId string
+ region string
+ userId int64
+}
+
+// getClientArg constructs client arguments from the model.
+func (r *userResource) getClientArg(model *Model) *clientArg {
return &clientArg{
- projectID: projectID,
- instanceID: instanceID,
- region: region,
- userID: userID,
- }, nil
+ projectId: model.ProjectId.ValueString(),
+ instanceId: model.InstanceId.ValueString(),
+ region: r.providerData.GetRegionWithOverride(model.Region),
+ userId: model.UserId.ValueInt64(),
+ }
}
// setTFLogFields adds relevant fields to the context for terraform logging purposes.
-func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context {
- ctx = tflog.SetField(ctx, "project_id", arg.projectID)
- ctx = tflog.SetField(ctx, "instance_id", arg.instanceID)
- ctx = tflog.SetField(ctx, "region", arg.region)
- ctx = tflog.SetField(ctx, "user_id", arg.userID)
+func (r *userResource) setTFLogFields(ctx context.Context, model *Model) context.Context {
+ usrCtx := r.getClientArg(model)
+
+ ctx = tflog.SetField(ctx, "project_id", usrCtx.projectId)
+ ctx = tflog.SetField(ctx, "instance_id", usrCtx.instanceId)
+ ctx = tflog.SetField(ctx, "user_id", usrCtx.userId)
+ ctx = tflog.SetField(ctx, "region", usrCtx.region)
return ctx
}
-// expandRoles converts a Terraform list of roles to a string slice.
-func (r *userResource) expandRoles(ctx context.Context, rolesSet types.List, diags *diag.Diagnostics) []string {
+func (r *userResource) expandRoles(ctx context.Context, rolesSet types.Set, diags *diag.Diagnostics) []string {
if rolesSet.IsNull() || rolesSet.IsUnknown() {
return nil
}
var roles []string
diags.Append(rolesSet.ElementsAs(ctx, &roles, false)...)
- slices.Sort(roles)
return roles
}
+
+func toCreatePayload(model *Model, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+ if roles == nil {
+ return nil, fmt.Errorf("nil roles")
+ }
+
+ return &postgresflex.CreateUserRequestPayload{
+ Roles: toPayloadRoles(roles),
+ Name: conversion.StringValueToPointer(model.Username),
+ }, nil
+}
+
+func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
+ var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
+ for _, role := range *roles {
+ userRoles = append(userRoles, postgresflex.UserRole(role))
+ }
+ return &userRoles
+}
+
+func toUpdatePayload(model *Model, roles *[]string) (
+ *postgresflex.UpdateUserRequestPayload,
+ error,
+) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+ if roles == nil {
+ return nil, fmt.Errorf("nil roles")
+ }
+
+ return &postgresflex.UpdateUserRequestPayload{
+ Name: conversion.StringValueToPointer(model.Username),
+ Roles: toPayloadRoles(roles),
+ }, nil
+}
diff --git a/stackit/internal/services/postgresflexalpha/user/resource_test.go b/stackit/internal/services/postgresflexalpha/user/resource_test.go
new file mode 100644
index 00000000..e4a13482
--- /dev/null
+++ b/stackit/internal/services/postgresflexalpha/user/resource_test.go
@@ -0,0 +1,448 @@
+package postgresflexalpha
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
+)
+
+func TestMapFieldsCreate(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflexalpha.GetUserResponse
+ region string
+ expected Model
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Password: types.StringNull(),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Name: utils.Ptr("username"),
+ ConnectionString: utils.Ptr("connection_string"),
+ Status: utils.Ptr("status"),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringValue("username"),
+ Roles: types.SetNull(types.StringType),
+ Password: types.StringNull(),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue("status"),
+ ConnectionString: types.StringValue("connection_string"),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Name: nil,
+ ConnectionString: nil,
+ Status: nil,
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Password: types.StringNull(),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &Model{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ }
+
+ err := mapFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestMapFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *postgresflexalpha.GetUserResponse
+ region string
+ expected Model
+ isValid bool
+ }{
+ {
+ "default_values",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(int64(1)),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &postgresflexalpha.GetUserResponse{
+ Roles: &[]postgresflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Name: utils.Ptr("username"),
+ Host: utils.Ptr("host"),
+ Port: utils.Ptr(int64(1234)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringValue("username"),
+ Roles: types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ Host: types.StringValue("host"),
+ Port: types.Int64Value(1234),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &postgresflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Name: nil,
+ Host: nil,
+ Port: utils.Ptr(int64(2123456789)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Host: types.StringNull(),
+ Port: types.Int64Value(2123456789),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ ConnectionString: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &postgresflexalpha.GetUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &Model{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ UserId: tt.expected.UserId,
+ }
+ err := mapFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToCreatePayload(t *testing.T) {
+ tests := []struct {
+ description string
+ input *Model
+ inputRoles *[]string
+ expected *postgresflexalpha.CreateUserRequestPayload
+ isValid bool
+ }{
+ {
+ "default_values",
+ &Model{},
+ &[]string{},
+ &postgresflexalpha.CreateUserRequestPayload{
+ Name: nil,
+ Roles: &[]postgresflexalpha.UserRole{},
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &Model{
+ Username: types.StringValue("username"),
+ },
+ &[]string{
+ "role_1",
+ "role_2",
+ },
+ &postgresflexalpha.CreateUserRequestPayload{
+ Name: utils.Ptr("username"),
+ Roles: &[]postgresflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ },
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &Model{
+ Username: types.StringNull(),
+ },
+ &[]string{
+ "",
+ },
+ &postgresflexalpha.CreateUserRequestPayload{
+ Roles: &[]postgresflexalpha.UserRole{
+ "",
+ },
+ Name: nil,
+ },
+ true,
+ },
+ {
+ "nil_model",
+ nil,
+ &[]string{},
+ nil,
+ false,
+ },
+ {
+ "nil_roles",
+ &Model{},
+ nil,
+ nil,
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ output, err := toCreatePayload(tt.input, tt.inputRoles)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(output, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToUpdatePayload(t *testing.T) {
+ tests := []struct {
+ description string
+ input *Model
+ inputRoles *[]string
+ expected *postgresflexalpha.UpdateUserRequestPayload
+ isValid bool
+ }{
+ {
+ "default_values",
+ &Model{},
+ &[]string{},
+ &postgresflexalpha.UpdateUserRequestPayload{
+ Roles: &[]postgresflexalpha.UserRole{},
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &Model{
+ Username: types.StringValue("username"),
+ },
+ &[]string{
+ "role_1",
+ "role_2",
+ },
+ &postgresflexalpha.UpdateUserRequestPayload{
+ Name: utils.Ptr("username"),
+ Roles: &[]postgresflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ },
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &Model{
+ Username: types.StringNull(),
+ },
+ &[]string{
+ "",
+ },
+ &postgresflexalpha.UpdateUserRequestPayload{
+ Roles: &[]postgresflexalpha.UserRole{
+ "",
+ },
+ },
+ true,
+ },
+ {
+ "nil_model",
+ nil,
+ &[]string{},
+ nil,
+ false,
+ },
+ {
+ "nil_roles",
+ &Model{},
+ nil,
+ nil,
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ output, err := toUpdatePayload(tt.input, tt.inputRoles)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(output, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
index f96d8d93..9734c2a9 100644
--- a/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
+++ b/stackit/internal/services/postgresflexalpha/user/resources_gen/user_resource_gen.go
@@ -14,6 +14,16 @@ import (
func UserResourceSchema(ctx context.Context) schema.Schema {
return schema.Schema{
Attributes: map[string]schema.Attribute{
+ "connection_string": schema.StringAttribute{
+ Computed: true,
+ Description: "The connection string for the user to the instance.",
+ MarkdownDescription: "The connection string for the user to the instance.",
+ },
+ "host": schema.StringAttribute{
+ Computed: true,
+ Description: "The host of the instance to which the user belongs.",
+ MarkdownDescription: "The host of the instance to which the user belongs.",
+ },
"id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
@@ -35,6 +45,11 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
Description: "The password for the user.",
MarkdownDescription: "The password for the user.",
},
+ "port": schema.Int64Attribute{
+ Computed: true,
+ Description: "The port of the instance to which the user belongs.",
+ MarkdownDescription: "The port of the instance to which the user belongs.",
+ },
"project_id": schema.StringAttribute{
Optional: true,
Computed: true,
@@ -75,13 +90,16 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
}
type UserModel struct {
- Id types.Int64 `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- Password types.String `tfsdk:"password"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
+ ConnectionString types.String `tfsdk:"connection_string"`
+ Host types.String `tfsdk:"host"`
+ Id types.Int64 `tfsdk:"id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ Name types.String `tfsdk:"name"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Region types.String `tfsdk:"region"`
+ Roles types.List `tfsdk:"roles"`
+ Status types.String `tfsdk:"status"`
+ UserId types.Int64 `tfsdk:"user_id"`
}
diff --git a/stackit/internal/utils/planModifiers.go b/stackit/internal/services/postgresflexalpha/utils/planModifiers.go
similarity index 100%
rename from stackit/internal/utils/planModifiers.go
rename to stackit/internal/services/postgresflexalpha/utils/planModifiers.go
diff --git a/stackit/internal/services/postgresflexalpha/utils/util.go b/stackit/internal/services/postgresflexalpha/utils/util.go
index 35047574..7d6c721a 100644
--- a/stackit/internal/services/postgresflexalpha/utils/util.go
+++ b/stackit/internal/services/postgresflexalpha/utils/util.go
@@ -8,8 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/core/config"
-
- postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
diff --git a/stackit/internal/services/postgresflexalpha/utils/util_test.go b/stackit/internal/services/postgresflexalpha/utils/util_test.go
index 16791f2b..185ece19 100644
--- a/stackit/internal/services/postgresflexalpha/utils/util_test.go
+++ b/stackit/internal/services/postgresflexalpha/utils/util_test.go
@@ -11,11 +11,10 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
"github.com/stackitcloud/stackit-sdk-go/core/config"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
const (
@@ -38,7 +37,7 @@ func TestConfigureClient(t *testing.T) {
name string
args args
wantErr bool
- expected *v3alpha1api.APIClient
+ expected *postgresflex.APIClient
}{
{
name: "default endpoint",
@@ -47,8 +46,8 @@ func TestConfigureClient(t *testing.T) {
Version: testVersion,
},
},
- expected: func() *v3alpha1api.APIClient {
- apiClient, err := v3alpha1api.NewAPIClient(
+ expected: func() *postgresflex.APIClient {
+ apiClient, err := postgresflex.NewAPIClient(
config.WithRegion("eu01"),
utils.UserAgentConfigOption(testVersion),
)
@@ -67,8 +66,8 @@ func TestConfigureClient(t *testing.T) {
PostgresFlexCustomEndpoint: testCustomEndpoint,
},
},
- expected: func() *v3alpha1api.APIClient {
- apiClient, err := v3alpha1api.NewAPIClient(
+ expected: func() *postgresflex.APIClient {
+ apiClient, err := postgresflex.NewAPIClient(
utils.UserAgentConfigOption(testVersion),
config.WithEndpoint(testCustomEndpoint),
)
diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasource.go b/stackit/internal/services/sqlserverflexalpha/database/datasource.go
index 137c29c7..cd796159 100644
--- a/stackit/internal/services/sqlserverflexalpha/database/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/database/datasource.go
@@ -2,104 +2,54 @@ package sqlserverflexalpha
import (
"context"
- "fmt"
- "net/http"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
)
var _ datasource.DataSource = (*databaseDataSource)(nil)
-const errorPrefix = "[sqlserverflexalpha - Database]"
-
func NewDatabaseDataSource() datasource.DataSource {
return &databaseDataSource{}
}
-type dataSourceModel struct {
- sqlserverflexalphaGen.DatabaseModel
- TerraformId types.String `tfsdk:"id"`
-}
-
type databaseDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
+ client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
-func (d *databaseDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
+func (d *databaseDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database"
}
func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
resp.Schema = sqlserverflexalphaGen.DatabaseDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
}
// Configure adds the provider configured client to the data source.
-func (d *databaseDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
+func (d *databaseDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
var ok bool
d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
return
}
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
return
}
d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+ tflog.Info(ctx, "SQL Server Flex database client configured")
}
func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
+ var data sqlserverflexalphaGen.DatabaseModel
+
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
@@ -107,69 +57,11 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
return
}
- ctx = core.InitProviderContext(ctx)
+ // TODO: Read API call logic
- // Extract identifiers from the plan
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.DatabaseName.ValueString()
-
- databaseResp, err := d.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
- // Map response body to schema and populate Computed attribute values
- err = mapFields(databaseResp, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
+ // Example data value setting
+ // data.Id = types.StringValue("example-id")
// Save data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "SQL Server Flex Alpha database read")
-}
-
-// handleReadError centralizes API error handling for the Read operation.
-func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) {
- utils.LogError(
- ctx,
- diags,
- err,
- "Reading database",
- fmt.Sprintf(
- "Could not retrieve database for instance %q in project %q.",
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusBadRequest: fmt.Sprintf(
- "Invalid request parameters for project %q and instance %q.",
- projectId,
- instanceId,
- ),
- http.StatusNotFound: fmt.Sprintf(
- "Database, instance %q, or project %q not found.",
- instanceId,
- projectId,
- ),
- http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
- },
- )
}
diff --git a/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go
index 82250802..25406f5f 100644
--- a/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/database/datasources_gen/database_data_source_gen.go
@@ -29,7 +29,7 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The name of the database.",
MarkdownDescription: "The name of the database.",
},
- "tf_original_api_id": schema.Int64Attribute{
+ "id": schema.Int64Attribute{
Computed: true,
Description: "The id of the database.",
MarkdownDescription: "The id of the database.",
@@ -72,7 +72,7 @@ type DatabaseModel struct {
CollationName types.String `tfsdk:"collation_name"`
CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"`
DatabaseName types.String `tfsdk:"database_name"`
- Id types.Int64 `tfsdk:"tf_original_api_id"`
+ Id types.Int64 `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
Name types.String `tfsdk:"name"`
Owner types.String `tfsdk:"owner"`
diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper.go b/stackit/internal/services/sqlserverflexalpha/database/mapper.go
deleted file mode 100644
index 65c19fa2..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/mapper.go
+++ /dev/null
@@ -1,106 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "fmt"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/types"
- coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
-func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model given is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != 0 {
- databaseId = source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\""))
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
- model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel()))
- model.CollationName = types.StringValue(source.GetCollationName())
-
- model.TerraformId = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- model.DatabaseName.ValueString(),
- )
-
- return nil
-}
-
-// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource.
-func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *resourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != 0 {
- databaseId = source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(strings.Trim(source.GetOwner(), "\""))
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
-
- model.Compatibility = types.Int64Value(int64(source.GetCompatibilityLevel()))
- model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel()))
-
- model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
- model.CollationName = types.StringValue(source.GetCollationName())
-
- return nil
-}
-
-// toCreatePayload converts the resource model to an API create payload.
-func toCreatePayload(model *resourceModel) (*sqlserverflexalpha.CreateDatabaseRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &sqlserverflexalpha.CreateDatabaseRequestPayload{
- Name: model.Name.ValueString(),
- Owner: model.Owner.ValueString(),
- Collation: model.Collation.ValueStringPointer(),
- Compatibility: coreUtils.Ptr(int32(model.Compatibility.ValueInt64())), //nolint:gosec // TODO
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go
deleted file mode 100644
index 96a5df1d..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/mapper_test.go
+++ /dev/null
@@ -1,233 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
-)
-
-func TestMapFields(t *testing.T) {
- type given struct {
- source *sqlserverflexalpha.GetDatabaseResponse
- model *dataSourceModel
- region string
- }
- type expected struct {
- model *dataSourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{
- Id: (int64(1)),
- Name: ("my-db"),
- CollationName: ("collation"),
- CompatibilityLevel: (int32(150)),
- Owner: ("my-owner"),
- },
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- },
- region: "eu01",
- },
- expected: expected{
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- DatabaseName: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- Region: types.StringValue("eu01"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- CompatibilityLevel: types.Int64Value(150),
- CollationName: types.StringValue("collation"),
- },
- TerraformId: types.StringValue("my-project,eu01,my-instance,my-db"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil source ID",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{Id: 0},
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil model",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{Id: (int64(1))},
- model: nil,
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapResourceFields(t *testing.T) {
- type given struct {
- source *sqlserverflexalpha.GetDatabaseResponse
- model *resourceModel
- region string
- }
- type expected struct {
- model *resourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &sqlserverflexalpha.GetDatabaseResponse{
- Id: (int64(1)),
- Name: ("my-db"),
- Owner: ("my-owner"),
- },
- model: &resourceModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- region: "eu01",
- },
- expected: expected{
- model: &resourceModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Compatibility: types.Int64Value(0),
- CompatibilityLevel: types.Int64Value(0),
- Collation: types.StringValue(""),
- CollationName: types.StringValue(""),
- DatabaseName: types.StringValue("my-db"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- Region: types.StringValue("eu01"),
- Owner: types.StringValue("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &resourceModel{},
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapResourceFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- type given struct {
- model *resourceModel
- }
- type expected struct {
- payload *sqlserverflexalpha.CreateDatabaseRequestPayload
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should convert model to payload",
- given: given{
- model: &resourceModel{
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- },
- },
- expected: expected{
- payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{
- Name: "my-db",
- Owner: "my-owner",
- Compatibility: utils.Ptr(int32(0)),
- },
- },
- },
- {
- name: "should fail on nil model",
- given: given{model: nil},
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- actual, err := toCreatePayload(tc.given.model)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.payload, actual); diff != "" {
- t.Errorf("payload mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml
deleted file mode 100644
index 1d010ed7..00000000
--- a/stackit/internal/services/sqlserverflexalpha/database/planModifiers.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'collation'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'owner'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'database_name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'collation_name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'compatibility'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'compatibility_level'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/database/resource.go b/stackit/internal/services/sqlserverflexalpha/database/resource.go
index fffacb91..52866a9c 100644
--- a/stackit/internal/services/sqlserverflexalpha/database/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/database/resource.go
@@ -2,31 +2,20 @@ package sqlserverflexalpha
import (
"context"
- _ "embed"
- "errors"
"fmt"
- "net/http"
"strings"
- "time"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/resources_gen"
+ sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/resources_gen"
)
var (
@@ -34,80 +23,23 @@ var (
_ resource.ResourceWithConfigure = &databaseResource{}
_ resource.ResourceWithImportState = &databaseResource{}
_ resource.ResourceWithModifyPlan = &databaseResource{}
- _ resource.ResourceWithIdentity = &databaseResource{}
-
- // Define errors
- errDatabaseNotFound = errors.New("database not found")
)
func NewDatabaseResource() resource.Resource {
return &databaseResource{}
}
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexalphaResGen.DatabaseModel
-
type databaseResource struct {
client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
-type DatabaseResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- DatabaseName types.String `tfsdk:"database_name"`
-}
-
-func (r *databaseResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
+func (r *databaseResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_database"
}
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexalphaResGen.DatabaseResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *databaseResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "database_name": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
+ resp.Schema = sqlserverflexalphaGen.DatabaseResourceSchema(ctx)
}
// Configure adds the provider configured client to the resource.
@@ -126,11 +58,8 @@ func (r *databaseResource) Configure(
config.WithCustomAuth(r.providerData.RoundTripper),
utils.UserAgentConfigOption(r.providerData.Version),
}
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
+ if r.providerData.PostgresFlexCustomEndpoint != "" {
+ apiClientConfigOptions = append(apiClientConfigOptions, config.WithEndpoint(r.providerData.PostgresFlexCustomEndpoint))
} else {
apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
}
@@ -138,10 +67,7 @@ func (r *databaseResource) Configure(
if err != nil {
resp.Diagnostics.AddError(
"Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
+ fmt.Sprintf("Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration", err),
)
return
}
@@ -150,8 +76,7 @@ func (r *databaseResource) Configure(
}
func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- createErr := "DB create error"
+ var data sqlserverflexalphaGen.DatabaseModel
// Read Terraform plan data into the model
resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
@@ -160,283 +85,64 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
return
}
- ctx = core.InitProviderContext(ctx)
+ // TODO: Create API call logic
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.Name.ValueString()
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- payLoad := sqlserverflexalpha.CreateDatabaseRequestPayload{}
- if !data.Collation.IsNull() && !data.Collation.IsUnknown() {
- payLoad.Collation = data.Collation.ValueStringPointer()
- }
-
- if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() {
- payLoad.Compatibility = coreUtils.Ptr(int32(data.Compatibility.ValueInt64())) //nolint:gosec // TODO
- }
-
- payLoad.Name = data.Name.ValueString()
- payLoad.Owner = data.Owner.ValueString()
-
- createResp, err := r.client.DefaultAPI.CreateDatabaseRequest(ctx, projectId, region, instanceId).
- CreateDatabaseRequestPayload(payLoad).
- Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Calling API: %v", err),
- )
- return
- }
-
- if createResp == nil || createResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- "API didn't return database Id. A database might have been created",
- )
- return
- }
-
- databaseId := createResp.Id
-
- ctx = tflog.SetField(ctx, "database_id", databaseId)
-
- ctx = core.LogResponse(ctx)
-
- // Set data returned by API in identity
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // TODO: is this necessary to wait for the database-> API say 200 ?
- waitResp, err := wait.CreateDatabaseWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectId,
- instanceId,
- region,
- databaseName,
- ).SetSleepBeforeWait(
- 30 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Database creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is nil",
- )
- return
- }
-
- if waitResp.Id != databaseId {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is different",
- )
- return
- }
-
- if waitResp.Owner != data.Owner.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned owner is different",
- )
- return
- }
-
- if waitResp.Name != data.Name.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned name is different",
- )
- return
- }
-
- database, err := r.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Getting database details after creation: %v", err),
- )
- return
- }
-
- // Map response body to schema
- err = mapResourceFields(database, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set state to fully populated data
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
- if resp.Diagnostics.HasError() {
- return
- }
+ // Example data value setting
+ // data.DatabaseId = types.StringValue("id-from-response")
// Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
tflog.Info(ctx, "sqlserverflexalpha.Database created")
}
func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
+ var data sqlserverflexalphaGen.DatabaseModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+
if resp.Diagnostics.HasError() {
return
}
- ctx = core.InitProviderContext(ctx)
+ // Todo: Read API call logic
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- databaseResp, err := r.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResourceFields(databaseResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
- if resp.Diagnostics.HasError() {
- return
- }
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
tflog.Info(ctx, "sqlserverflexalpha.Database read")
}
-func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) {
- // TODO: Check update api endpoint - not available at the moment, so return an error for now
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "Database can't be updated")
+func (r *databaseResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data sqlserverflexalphaGen.DatabaseModel
+
+ // Read Terraform plan data into the model
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Todo: Update API call logic
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+
+ tflog.Info(ctx, "sqlserverflexalpha.Database updated")
}
func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
+ var data sqlserverflexalphaGen.DatabaseModel
+
+ // Read Terraform prior state data into the model
+ resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+
if resp.Diagnostics.HasError() {
return
}
- // Read identity data
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- // Delete existing record set
- err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting database",
- fmt.Sprintf(
- "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
- ),
- )
- return
- }
-
- ctx = core.LogResponse(ctx)
- resp.State.RemoveResource(ctx)
+ // Todo: Delete API call logic
tflog.Info(ctx, "sqlserverflexalpha.Database deleted")
}
@@ -448,18 +154,17 @@ func (r *databaseResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
+ var configModel sqlserverflexalphaGen.DatabaseModel
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
}
-
- var configModel resourceModel
resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
if resp.Diagnostics.HasError() {
return
}
- var planModel resourceModel
+ var planModel sqlserverflexalphaGen.DatabaseModel
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -483,59 +188,30 @@ func (r *databaseResource) ImportState(
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- ctx = core.InitProviderContext(ctx)
+ idParts := strings.Split(req.ID, core.Separator)
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[database_name] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...)
-
- var identityData DatabaseResourceIdentityModel
- identityData.ProjectID = types.StringValue(idParts[0])
- identityData.Region = types.StringValue(idParts[1])
- identityData.InstanceID = types.StringValue(idParts[2])
- identityData.DatabaseName = types.StringValue(idParts[3])
-
- resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha database state imported")
+ // Todo: Import logic
+ if len(idParts) < 2 || idParts[0] == "" || idParts[1] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing database",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],..., got %q",
+ req.ID,
+ ),
+ )
return
}
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ // ... more ...
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- databaseName := identityData.DatabaseName.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...)
-
- tflog.Info(ctx, "sqlserverflexalpha database state imported")
+ core.LogAndAddWarning(
+ ctx,
+ &resp.Diagnostics,
+ "Sqlserverflexalpha database imported with empty password",
+ "The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
+ )
+ tflog.Info(ctx, "Sqlserverflexalpha database state imported")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
index fb5a9273..1deb2beb 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavor/datasource.go
@@ -1,4 +1,4 @@
-package sqlserverflexalphaFlavor
+package sqlserverFlexAlphaFlavor
import (
"context"
@@ -10,15 +10,14 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
- sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor/datasources_gen"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
)
// Ensure the implementation satisfies the expected interfaces.
@@ -49,7 +48,7 @@ func NewFlavorDataSource() datasource.DataSource {
// flavorDataSource is the data source implementation.
type flavorDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
+ client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
@@ -66,34 +65,12 @@ func (r *flavorDataSource) Configure(ctx context.Context, req datasource.Configu
return
}
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(r.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
return
}
r.client = apiClient
- tflog.Info(ctx, "SQL Server Flex instance client configured")
+ tflog.Info(ctx, "Postgres Flex instance client configured")
}
func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
@@ -101,13 +78,13 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Attributes: map[string]schema.Attribute{
"project_id": schema.StringAttribute{
Required: true,
- Description: "The project ID of the flavor.",
- MarkdownDescription: "The project ID of the flavor.",
+ Description: "The cpu count of the instance.",
+ MarkdownDescription: "The cpu count of the instance.",
},
"region": schema.StringAttribute{
Required: true,
- Description: "The region of the flavor.",
- MarkdownDescription: "The region of the flavor.",
+ Description: "The flavor description.",
+ MarkdownDescription: "The flavor description.",
},
"cpu": schema.Int64Attribute{
Required: true,
@@ -124,16 +101,6 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.",
},
- "node_type": schema.StringAttribute{
- Required: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "flavor_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
"description": schema.StringAttribute{
Computed: true,
Description: "The flavor description.",
@@ -141,8 +108,13 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
},
"id": schema.StringAttribute{
Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
+ Description: "The terraform id of the instance flavor.",
+ MarkdownDescription: "The terraform id of the instance flavor.",
+ },
+ "flavor_id": schema.StringAttribute{
+ Computed: true,
+ Description: "The flavor id of the instance flavor.",
+ MarkdownDescription: "The flavor id of the instance flavor.",
},
"max_gb": schema.Int64Attribute{
Computed: true,
@@ -154,7 +126,13 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
},
+ "node_type": schema.StringAttribute{
+ Required: true,
+ Description: "defines the nodeType it can be either single or replica",
+ MarkdownDescription: "defines the nodeType it can be either single or replica",
+ },
"storage_classes": schema.ListNestedAttribute{
+ Computed: true,
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"class": schema.StringAttribute{
@@ -173,89 +151,8 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
},
},
},
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
},
},
- //Attributes: map[string]schema.Attribute{
- // "project_id": schema.StringAttribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "region": schema.StringAttribute{
- // Required: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "cpu": schema.Int64Attribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "ram": schema.Int64Attribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "storage_class": schema.StringAttribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "description": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "id": schema.StringAttribute{
- // Computed: true,
- // Description: "The terraform id of the instance flavor.",
- // MarkdownDescription: "The terraform id of the instance flavor.",
- // },
- // "flavor_id": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor id of the instance flavor.",
- // MarkdownDescription: "The flavor id of the instance flavor.",
- // },
- // "max_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // },
- // "min_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "minimum storage which is required to order in Gigabyte.",
- // MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- // },
- // "node_type": schema.StringAttribute{
- // Required: true,
- // Description: "defines the nodeType it can be either single or replica",
- // MarkdownDescription: "defines the nodeType it can be either single or replica",
- // },
- // "storage_classes": schema.ListNestedAttribute{
- // Computed: true,
- // NestedObject: schema.NestedAttributeObject{
- // Attributes: map[string]schema.Attribute{
- // "class": schema.StringAttribute{
- // Computed: true,
- // },
- // "max_io_per_sec": schema.Int64Attribute{
- // Computed: true,
- // },
- // "max_through_in_mb": schema.Int64Attribute{
- // Computed: true,
- // },
- // },
- // CustomType: sqlserverflexalphaGen.StorageClassesType{
- // ObjectType: types.ObjectType{
- // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- // },
- // },
- // },
- // },
- // },
}
}
@@ -274,25 +171,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
- flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
+ flavors, err := getAllFlavors(ctx, r.client, projectId, region)
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return
}
- var foundFlavors []sqlserverflexalphaPkg.ListFlavors
+ var foundFlavors []sqlserverflexalpha.ListFlavors
for _, flavor := range flavors {
- if model.Cpu.ValueInt64() != flavor.Cpu {
+ if model.Cpu.ValueInt64() != *flavor.Cpu {
continue
}
- if model.Memory.ValueInt64() != flavor.Memory {
+ if model.Memory.ValueInt64() != *flavor.Memory {
continue
}
- if model.NodeType.ValueString() != flavor.NodeType {
+ if model.NodeType.ValueString() != *flavor.NodeType {
continue
}
- for _, sc := range flavor.StorageClasses {
- if model.StorageClass.ValueString() != sc.Class {
+ for _, sc := range *flavor.StorageClasses {
+ if model.StorageClass.ValueString() != *sc.Class {
continue
}
foundFlavors = append(foundFlavors, flavor)
@@ -308,11 +205,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}
f := foundFlavors[0]
- model.Description = types.StringValue(f.Description)
- model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
- model.FlavorId = types.StringValue(f.Id)
- model.MaxGb = types.Int64Value(int64(f.MaxGB))
- model.MinGb = types.Int64Value(int64(f.MinGB))
+ model.Description = types.StringValue(*f.Description)
+ model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
+ model.FlavorId = types.StringValue(*f.Id)
+ model.MaxGb = types.Int64Value(*f.MaxGB)
+ model.MinGb = types.Int64Value(*f.MinGB)
if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(sqlserverflexalphaGen.StorageClassesType{
@@ -322,15 +219,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
})
} else {
var scList []attr.Value
- for _, sc := range f.StorageClasses {
+ for _, sc := range *f.StorageClasses {
scList = append(
scList,
sqlserverflexalphaGen.NewStorageClassesValueMust(
sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{
- "class": types.StringValue(sc.Class),
- "max_io_per_sec": types.Int64Value(int64(sc.MaxIoPerSec)),
- "max_through_in_mb": types.Int64Value(int64(sc.MaxThroughInMb)),
+ "class": types.StringValue(*sc.Class),
+ "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
+ "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
},
),
)
@@ -352,5 +249,5 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
if resp.Diagnostics.HasError() {
return
}
- tflog.Info(ctx, "SQL Server Flex flavors read")
+ tflog.Info(ctx, "Postgres Flex flavors read")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions.go
index 889c95d2..e396324a 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavor/functions.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavor/functions.go
@@ -1,10 +1,10 @@
-package sqlserverflexalphaFlavor
+package sqlserverFlexAlphaFlavor
import (
"context"
"fmt"
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
)
type flavorsClientReader interface {
@@ -50,11 +50,11 @@ func getFlavorsByFilter(
}
// If the API returns no flavors, we have reached the end of the list.
- if len(res.Flavors) == 0 {
+ if res.Flavors == nil || len(*res.Flavors) == 0 {
break
}
- for _, flavor := range res.Flavors {
+ for _, flavor := range *res.Flavors {
if filter(flavor) {
result = append(result, flavor)
}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go
index cd80c871..0246d866 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavor/functions_test.go
@@ -1,61 +1,83 @@
-package sqlserverflexalphaFlavor
+package sqlserverFlexAlphaFlavor
import (
"context"
"testing"
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
)
-var mockResp = func(page int64) (*v3alpha1api.GetFlavorsResponse, error) {
+type mockRequest struct {
+ executeFunc func() (*sqlserverflexalpha.GetFlavorsResponse, error)
+}
+
+func (m *mockRequest) Page(_ int64) sqlserverflexalpha.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Size(_ int64) sqlserverflexalpha.ApiGetFlavorsRequestRequest { return m }
+func (m *mockRequest) Sort(_ sqlserverflexalpha.FlavorSort) sqlserverflexalpha.ApiGetFlavorsRequestRequest {
+ return m
+}
+func (m *mockRequest) Execute() (*sqlserverflexalpha.GetFlavorsResponse, error) {
+ return m.executeFunc()
+}
+
+type mockFlavorsClient struct {
+ executeRequest func() sqlserverflexalpha.ApiGetFlavorsRequestRequest
+}
+
+func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) sqlserverflexalpha.ApiGetFlavorsRequestRequest {
+ return m.executeRequest()
+}
+
+var mockResp = func(page int64) (*sqlserverflexalpha.GetFlavorsResponse, error) {
if page == 1 {
- return &v3alpha1api.GetFlavorsResponse{
- Flavors: []v3alpha1api.ListFlavors{
- {Id: "flavor-1", Description: "first"},
- {Id: "flavor-2", Description: "second"},
+ return &sqlserverflexalpha.GetFlavorsResponse{
+ Flavors: &[]sqlserverflexalpha.ListFlavors{
+ {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
+ {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
},
}, nil
}
if page == 2 {
- return &v3alpha1api.GetFlavorsResponse{
- Flavors: []v3alpha1api.ListFlavors{
- {Id: "flavor-3", Description: "three"},
+ return &sqlserverflexalpha.GetFlavorsResponse{
+ Flavors: &[]sqlserverflexalpha.ListFlavors{
+ {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
},
}, nil
}
- return &v3alpha1api.GetFlavorsResponse{
- Flavors: []v3alpha1api.ListFlavors{},
+ return &sqlserverflexalpha.GetFlavorsResponse{
+ Flavors: &[]sqlserverflexalpha.ListFlavors{},
}, nil
}
func TestGetFlavorsByFilter(t *testing.T) {
tests := []struct {
description string
- projectID string
+ projectId string
region string
mockErr error
- filter func(v3alpha1api.ListFlavors) bool
+ filter func(sqlserverflexalpha.ListFlavors) bool
wantCount int
wantErr bool
}{
{
description: "Success - Get all flavors (2 pages)",
- projectID: "pid", region: "reg",
- filter: func(_ v3alpha1api.ListFlavors) bool { return true },
+ projectId: "pid", region: "reg",
+ filter: func(_ sqlserverflexalpha.ListFlavors) bool { return true },
wantCount: 3,
wantErr: false,
},
{
description: "Success - Filter flavors by description",
- projectID: "pid", region: "reg",
- filter: func(f v3alpha1api.ListFlavors) bool { return f.Description == "first" },
+ projectId: "pid", region: "reg",
+ filter: func(f sqlserverflexalpha.ListFlavors) bool { return *f.Description == "first" },
wantCount: 1,
wantErr: false,
},
{
description: "Error - Missing parameters",
- projectID: "", region: "reg",
+ projectId: "", region: "reg",
wantErr: true,
},
}
@@ -64,15 +86,17 @@ func TestGetFlavorsByFilter(t *testing.T) {
t.Run(
tt.description, func(t *testing.T) {
var currentPage int64
- getFlavorsMock := func(_ v3alpha1api.ApiGetFlavorsRequestRequest) (*v3alpha1api.GetFlavorsResponse, error) {
- currentPage++
- return mockResp(currentPage)
+ client := &mockFlavorsClient{
+ executeRequest: func() sqlserverflexalpha.ApiGetFlavorsRequestRequest {
+ return &mockRequest{
+ executeFunc: func() (*sqlserverflexalpha.GetFlavorsResponse, error) {
+ currentPage++
+ return mockResp(currentPage)
+ },
+ }
+ },
}
-
- client := v3alpha1api.DefaultAPIServiceMock{
- GetFlavorsRequestExecuteMock: &getFlavorsMock,
- }
- actual, err := getFlavorsByFilter(context.Background(), client, tt.projectID, tt.region, tt.filter)
+ actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
if (err != nil) != tt.wantErr {
t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
@@ -89,14 +113,15 @@ func TestGetFlavorsByFilter(t *testing.T) {
func TestGetAllFlavors(t *testing.T) {
var currentPage int64
-
- getFlavorsMock := func(_ v3alpha1api.ApiGetFlavorsRequestRequest) (*v3alpha1api.GetFlavorsResponse, error) {
- currentPage++
- return mockResp(currentPage)
- }
-
- client := v3alpha1api.DefaultAPIServiceMock{
- GetFlavorsRequestExecuteMock: &getFlavorsMock,
+ client := &mockFlavorsClient{
+ executeRequest: func() sqlserverflexalpha.ApiGetFlavorsRequestRequest {
+ return &mockRequest{
+ executeFunc: func() (*sqlserverflexalpha.GetFlavorsResponse, error) {
+ currentPage++
+ return mockResp(currentPage)
+ },
+ }
+ },
}
res, err := getAllFlavors(context.Background(), client, "pid", "reg")
diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
index 8727b606..27609fc5 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasource.go
@@ -2,103 +2,54 @@ package sqlserverflexalpha
import (
"context"
- "fmt"
- "net/http"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen"
)
var _ datasource.DataSource = (*flavorsDataSource)(nil)
-const errorPrefix = "[sqlserverflexalpha - Flavors]"
-
func NewFlavorsDataSource() datasource.DataSource {
return &flavorsDataSource{}
}
-type dataSourceModel struct {
- sqlserverflexalphaGen.FlavorsModel
- TerraformId types.String `tfsdk:"id"`
-}
-
type flavorsDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
+ client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
-func (d *flavorsDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
+func (d *flavorsDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_flavors"
}
func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
resp.Schema = sqlserverflexalphaGen.FlavorsDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
}
// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
+func (d *flavorsDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
var ok bool
d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
return
}
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexalphaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
return
}
d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+ tflog.Info(ctx, "SQL SERVER Flex flavors client configured")
}
func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
+ var data sqlserverflexalphaGen.FlavorsModel
// Read Terraform configuration data into the model
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
@@ -107,50 +58,11 @@ func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest
return
}
- ctx = core.InitProviderContext(ctx)
+ // Todo: Read API call logic
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- // TODO: implement right identifier for flavors
- flavorsId := data.Flavors
+ // Example data value setting
+ // data.Id = types.StringValue("example-id")
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: implement needed fields
- ctx = tflog.SetField(ctx, "flavors_id", flavorsId)
-
- // TODO: refactor to correct implementation
- _, err := d.client.DefaultAPI.GetFlavorsRequest(ctx, projectId, region).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading flavors",
- fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // TODO: refactor to correct implementation of internal tf id
- data.TerraformId = utils.BuildInternalTerraformId(projectId, region)
-
- // TODO: fill remaining fields
- // data.Flavors = types.Sometype(apiResponse.GetFlavors())
- // data.Page = types.Sometype(apiResponse.GetPage())
- // data.Pagination = types.Sometype(apiResponse.GetPagination())
- // data.ProjectId = types.Sometype(apiResponse.GetProjectId())
- // data.Region = types.Sometype(apiResponse.GetRegion())
- // data.Size = types.Sometype(apiResponse.GetSize())
- // data.Sort = types.Sometype(apiResponse.GetSort())// Save data into Terraform state
+ // Save data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix))
}
diff --git a/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
index 40f086e2..43ac64f5 100644
--- a/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/flavors/datasources_gen/flavors_data_source_gen.go
@@ -33,7 +33,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The flavor description.",
MarkdownDescription: "The flavor description.",
},
- "tf_original_api_id": schema.StringAttribute{
+ "id": schema.StringAttribute{
Computed: true,
Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.",
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
index 32dd3ed1..5b0fb0fd 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/datasource.go
@@ -1,125 +1,248 @@
-package sqlserverflexalpha
+// Copyright (c) STACKIT
+
+package sqlserverflex
import (
"context"
"fmt"
"net/http"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
+
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
)
-var _ datasource.DataSource = (*instanceDataSource)(nil)
-
-const errorPrefix = "[sqlserverflexalpha - Instance]"
+// Ensure the implementation satisfies the expected interfaces.
+var (
+ _ datasource.DataSource = &instanceDataSource{}
+)
+// NewInstanceDataSource is a helper function to simplify the provider implementation.
func NewInstanceDataSource() datasource.DataSource {
return &instanceDataSource{}
}
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- sqlserverflexalphaGen.InstanceModel
- TerraformID types.String `tfsdk:"id"`
-}
-
+// instanceDataSource is the data source implementation.
type instanceDataSource struct {
- client *v3alpha1api.APIClient
+ client *sqlserverflex.APIClient
providerData core.ProviderData
}
-func (d *instanceDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
+// Metadata returns the data source type name.
+func (r *instanceDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance"
}
-func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.InstanceDataSourceSchema(ctx)
-}
-
// Configure adds the provider configured client to the data source.
-func (d *instanceDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
+func (r *instanceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
return
}
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := v3alpha1api.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
return
}
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
+ r.client = apiClient
+ tflog.Info(ctx, "SQLServer Flex instance client configured")
}
-func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
+// Schema defines the schema for the data source.
+func (r *instanceDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".",
+ "instance_id": "ID of the SQLServer Flex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "name": "Instance name.",
+ "access_scope": "The access scope of the instance. (e.g. SNA)",
+ "acl": "The Access Control List (ACL) for the SQLServer Flex instance.",
+ "backup_schedule": `The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")`,
+ "region": "The resource region. If not defined, the provider region is used.",
+ "encryption": "The encryption block.",
+ "network": "The network block.",
+ "keyring_id": "STACKIT KMS - KeyRing ID of the encryption key to use.",
+ "key_id": "STACKIT KMS - Key ID of the encryption key to use.",
+ "key_version": "STACKIT KMS - Key version to use in the encryption key.",
+ "service:account": "STACKIT KMS - service account to use in the encryption key.",
+ "instance_address": "The returned instance IP address of the SQLServer Flex instance.",
+ "router_address": "The returned router IP address of the SQLServer Flex instance.",
+ }
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ },
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "name": schema.StringAttribute{
+ Description: descriptions["name"],
+ Computed: true,
+ },
+ "backup_schedule": schema.StringAttribute{
+ Description: descriptions["backup_schedule"],
+ Computed: true,
+ },
+ "is_deletable": schema.BoolAttribute{
+ Description: descriptions["is_deletable"],
+ Computed: true,
+ },
+ "flavor": schema.SingleNestedAttribute{
+ Computed: true,
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Computed: true,
+ },
+ "description": schema.StringAttribute{
+ Computed: true,
+ },
+ "cpu": schema.Int64Attribute{
+ Computed: true,
+ },
+ "ram": schema.Int64Attribute{
+ Computed: true,
+ },
+ "node_type": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ },
+ "replicas": schema.Int64Attribute{
+ Computed: true,
+ },
+ "storage": schema.SingleNestedAttribute{
+ Computed: true,
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Computed: true,
+ },
+ "size": schema.Int64Attribute{
+ Computed: true,
+ },
+ },
+ },
+ "version": schema.StringAttribute{
+ Computed: true,
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ "edition": schema.StringAttribute{
+ Computed: true,
+ },
+ "retention_days": schema.Int64Attribute{
+ Computed: true,
+ },
+ "region": schema.StringAttribute{
+ // the region cannot be found, so it has to be passed
+ Optional: true,
+ Description: descriptions["region"],
+ },
+ "encryption": schema.SingleNestedAttribute{
+ Computed: true,
+ Attributes: map[string]schema.Attribute{
+ "key_id": schema.StringAttribute{
+ Description: descriptions["key_id"],
+ Computed: true,
+ },
+ "key_version": schema.StringAttribute{
+ Description: descriptions["key_version"],
+ Computed: true,
+ },
+ "keyring_id": schema.StringAttribute{
+ Description: descriptions["keyring_id"],
+ Computed: true,
+ },
+ "service_account": schema.StringAttribute{
+ Description: descriptions["service_account"],
+ Computed: true,
+ },
+ },
+ Description: descriptions["encryption"],
+ },
+ "network": schema.SingleNestedAttribute{
+ Computed: true,
+ Attributes: map[string]schema.Attribute{
+ "access_scope": schema.StringAttribute{
+ Description: descriptions["access_scope"],
+ Computed: true,
+ },
+ "instance_address": schema.StringAttribute{
+ Description: descriptions["instance_address"],
+ Computed: true,
+ },
+ "router_address": schema.StringAttribute{
+ Description: descriptions["router_address"],
+ Computed: true,
+ },
+ "acl": schema.ListAttribute{
+ Description: descriptions["acl"],
+ ElementType: types.StringType,
+ Computed: true,
+ },
+ },
+ Description: descriptions["network"],
+ },
+ },
+ }
+}
+// Read refreshes the Terraform state with the latest data.
+func (r *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { // nolint:gocritic // function signature required by Terraform
+ var model Model
+ diags := req.Config.Get(ctx, &model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx)
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
+ region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := d.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ ctx = tflog.SetField(ctx, "region", region)
+ instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
utils.LogError(
ctx,
&resp.Diagnostics,
err,
"Reading instance",
- fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId),
+ fmt.Sprintf("Instance with ID %q does not exist in project %q.", instanceId, projectId),
map[int]string{
http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
},
@@ -130,17 +253,43 @@ func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadReques
ctx = core.LogResponse(ctx)
- err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- fmt.Sprintf("%s Read", errorPrefix),
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
+ var storage = &storageModel{}
+ if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
+ diags = model.Storage.As(ctx, storage, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
}
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+ var encryption = &encryptionModel{}
+ if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
+ diags = model.Encryption.As(ctx, encryption, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ var network = &networkModel{}
+ if !model.Network.IsNull() && !model.Network.IsUnknown() {
+ diags = model.Network.As(ctx, network, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region)
+ if err != nil {
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", fmt.Sprintf("Processing API payload: %v", err))
+ return
+ }
+ // Set refreshed state
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ tflog.Info(ctx, "SQLServer Flex instance read")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go
deleted file mode 100644
index 5880a392..00000000
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instance_data_source_gen.go
+++ /dev/null
@@ -1,1579 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "backup_schedule": schema.StringAttribute{
- Computed: true,
- Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
- MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
- },
- "edition": schema.StringAttribute{
- Computed: true,
- Description: "Edition of the MSSQL server instance",
- MarkdownDescription: "Edition of the MSSQL server instance",
- },
- "encryption": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "kek_key_id": schema.StringAttribute{
- Computed: true,
- Description: "The key identifier",
- MarkdownDescription: "The key identifier",
- },
- "kek_key_ring_id": schema.StringAttribute{
- Computed: true,
- Description: "The keyring identifier",
- MarkdownDescription: "The keyring identifier",
- },
- "kek_key_version": schema.StringAttribute{
- Computed: true,
- Description: "The key version",
- MarkdownDescription: "The key version",
- },
- "service_account": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: EncryptionType{
- ObjectType: types.ObjectType{
- AttrTypes: EncryptionValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- Description: "this defines which key to use for storage encryption",
- MarkdownDescription: "this defines which key to use for storage encryption",
- },
- "flavor_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "tf_original_api_id": schema.StringAttribute{
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "instance_id": schema.StringAttribute{
- Required: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "is_deletable": schema.BoolAttribute{
- Computed: true,
- Description: "Whether the instance can be deleted or not.",
- MarkdownDescription: "Whether the instance can be deleted or not.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the instance.",
- MarkdownDescription: "The name of the instance.",
- },
- "network": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "access_scope": schema.StringAttribute{
- Computed: true,
- Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
- MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
- },
- "acl": schema.ListAttribute{
- ElementType: types.StringType,
- Computed: true,
- Description: "List of IPV4 cidr.",
- MarkdownDescription: "List of IPV4 cidr.",
- },
- "instance_address": schema.StringAttribute{
- Computed: true,
- },
- "router_address": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: NetworkType{
- ObjectType: types.ObjectType{
- AttrTypes: NetworkValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- Description: "The access configuration of the instance",
- MarkdownDescription: "The access configuration of the instance",
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "replicas": schema.Int64Attribute{
- Computed: true,
- Description: "How many replicas the instance should have.",
- MarkdownDescription: "How many replicas the instance should have.",
- },
- "retention_days": schema.Int64Attribute{
- Computed: true,
- Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
- MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
- },
- "status": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- Description: "The storage class for the storage.",
- MarkdownDescription: "The storage class for the storage.",
- },
- "size": schema.Int64Attribute{
- Computed: true,
- Description: "The storage size in Gigabytes.",
- MarkdownDescription: "The storage size in Gigabytes.",
- },
- },
- CustomType: StorageType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- Description: "The object containing information about the storage size and class.",
- MarkdownDescription: "The object containing information about the storage size and class.",
- },
- "version": schema.StringAttribute{
- Computed: true,
- Description: "The sqlserver version used for the instance.",
- MarkdownDescription: "The sqlserver version used for the instance.",
- },
- },
- }
-}
-
-type InstanceModel struct {
- BackupSchedule types.String `tfsdk:"backup_schedule"`
- Edition types.String `tfsdk:"edition"`
- Encryption EncryptionValue `tfsdk:"encryption"`
- FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"tf_original_api_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- IsDeletable types.Bool `tfsdk:"is_deletable"`
- Name types.String `tfsdk:"name"`
- Network NetworkValue `tfsdk:"network"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Replicas types.Int64 `tfsdk:"replicas"`
- RetentionDays types.Int64 `tfsdk:"retention_days"`
- Status types.String `tfsdk:"status"`
- Storage StorageValue `tfsdk:"storage"`
- Version types.String `tfsdk:"version"`
-}
-
-var _ basetypes.ObjectTypable = EncryptionType{}
-
-type EncryptionType struct {
- basetypes.ObjectType
-}
-
-func (t EncryptionType) Equal(o attr.Type) bool {
- other, ok := o.(EncryptionType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t EncryptionType) String() string {
- return "EncryptionType"
-}
-
-func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- kekKeyIdAttribute, ok := attributes["kek_key_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_id is missing from object`)
-
- return nil, diags
- }
-
- kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
- }
-
- kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_ring_id is missing from object`)
-
- return nil, diags
- }
-
- kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
- }
-
- kekKeyVersionAttribute, ok := attributes["kek_key_version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_version is missing from object`)
-
- return nil, diags
- }
-
- kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
- }
-
- serviceAccountAttribute, ok := attributes["service_account"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `service_account is missing from object`)
-
- return nil, diags
- }
-
- serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return EncryptionValue{
- KekKeyId: kekKeyIdVal,
- KekKeyRingId: kekKeyRingIdVal,
- KekKeyVersion: kekKeyVersionVal,
- ServiceAccount: serviceAccountVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewEncryptionValueNull() EncryptionValue {
- return EncryptionValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewEncryptionValueUnknown() EncryptionValue {
- return EncryptionValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing EncryptionValue Attribute Value",
- "While creating a EncryptionValue value, a missing attribute value was detected. "+
- "A EncryptionValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid EncryptionValue Attribute Type",
- "While creating a EncryptionValue value, an invalid attribute value was detected. "+
- "A EncryptionValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra EncryptionValue Attribute Value",
- "While creating a EncryptionValue value, an extra attribute value was detected. "+
- "A EncryptionValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyIdAttribute, ok := attributes["kek_key_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_id is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
- }
-
- kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_ring_id is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
- }
-
- kekKeyVersionAttribute, ok := attributes["kek_key_version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_version is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
- }
-
- serviceAccountAttribute, ok := attributes["service_account"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `service_account is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
- }
-
- if diags.HasError() {
- return NewEncryptionValueUnknown(), diags
- }
-
- return EncryptionValue{
- KekKeyId: kekKeyIdVal,
- KekKeyRingId: kekKeyRingIdVal,
- KekKeyVersion: kekKeyVersionVal,
- ServiceAccount: serviceAccountVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue {
- object, diags := NewEncryptionValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewEncryptionValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewEncryptionValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewEncryptionValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t EncryptionType) ValueType(ctx context.Context) attr.Value {
- return EncryptionValue{}
-}
-
-var _ basetypes.ObjectValuable = EncryptionValue{}
-
-type EncryptionValue struct {
- KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"`
- KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"`
- KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"`
- ServiceAccount basetypes.StringValue `tfsdk:"service_account"`
- state attr.ValueState
-}
-
-func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.KekKeyId.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_id"] = val
-
- val, err = v.KekKeyRingId.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_ring_id"] = val
-
- val, err = v.KekKeyVersion.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_version"] = val
-
- val, err = v.ServiceAccount.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["service_account"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v EncryptionValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v EncryptionValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v EncryptionValue) String() string {
- return "EncryptionValue"
-}
-
-func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "kek_key_id": basetypes.StringType{},
- "kek_key_ring_id": basetypes.StringType{},
- "kek_key_version": basetypes.StringType{},
- "service_account": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "kek_key_id": v.KekKeyId,
- "kek_key_ring_id": v.KekKeyRingId,
- "kek_key_version": v.KekKeyVersion,
- "service_account": v.ServiceAccount,
- })
-
- return objVal, diags
-}
-
-func (v EncryptionValue) Equal(o attr.Value) bool {
- other, ok := o.(EncryptionValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.KekKeyId.Equal(other.KekKeyId) {
- return false
- }
-
- if !v.KekKeyRingId.Equal(other.KekKeyRingId) {
- return false
- }
-
- if !v.KekKeyVersion.Equal(other.KekKeyVersion) {
- return false
- }
-
- if !v.ServiceAccount.Equal(other.ServiceAccount) {
- return false
- }
-
- return true
-}
-
-func (v EncryptionValue) Type(ctx context.Context) attr.Type {
- return EncryptionType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "kek_key_id": basetypes.StringType{},
- "kek_key_ring_id": basetypes.StringType{},
- "kek_key_version": basetypes.StringType{},
- "service_account": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = NetworkType{}
-
-type NetworkType struct {
- basetypes.ObjectType
-}
-
-func (t NetworkType) Equal(o attr.Type) bool {
- other, ok := o.(NetworkType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t NetworkType) String() string {
- return "NetworkType"
-}
-
-func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- accessScopeAttribute, ok := attributes["access_scope"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `access_scope is missing from object`)
-
- return nil, diags
- }
-
- accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
- }
-
- aclAttribute, ok := attributes["acl"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `acl is missing from object`)
-
- return nil, diags
- }
-
- aclVal, ok := aclAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
- }
-
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return nil, diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return nil, diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewNetworkValueNull() NetworkValue {
- return NetworkValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewNetworkValueUnknown() NetworkValue {
- return NetworkValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing NetworkValue Attribute Value",
- "While creating a NetworkValue value, a missing attribute value was detected. "+
- "A NetworkValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid NetworkValue Attribute Type",
- "While creating a NetworkValue value, an invalid attribute value was detected. "+
- "A NetworkValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra NetworkValue Attribute Value",
- "While creating a NetworkValue value, an extra attribute value was detected. "+
- "A NetworkValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewNetworkValueUnknown(), diags
- }
-
- accessScopeAttribute, ok := attributes["access_scope"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `access_scope is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
- }
-
- aclAttribute, ok := attributes["acl"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `acl is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- aclVal, ok := aclAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
- }
-
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
- if diags.HasError() {
- return NewNetworkValueUnknown(), diags
- }
-
- return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue {
- object, diags := NewNetworkValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewNetworkValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewNetworkValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewNetworkValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t NetworkType) ValueType(ctx context.Context) attr.Value {
- return NetworkValue{}
-}
-
-var _ basetypes.ObjectValuable = NetworkValue{}
-
-type NetworkValue struct {
- AccessScope basetypes.StringValue `tfsdk:"access_scope"`
- Acl basetypes.ListValue `tfsdk:"acl"`
- InstanceAddress basetypes.StringValue `tfsdk:"instance_address"`
- RouterAddress basetypes.StringValue `tfsdk:"router_address"`
- state attr.ValueState
-}
-
-func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["acl"] = basetypes.ListType{
- ElemType: types.StringType,
- }.TerraformType(ctx)
- attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.AccessScope.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["access_scope"] = val
-
- val, err = v.Acl.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["acl"] = val
-
- val, err = v.InstanceAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["instance_address"] = val
-
- val, err = v.RouterAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["router_address"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v NetworkValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v NetworkValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v NetworkValue) String() string {
- return "NetworkValue"
-}
-
-func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- var aclVal basetypes.ListValue
- switch {
- case v.Acl.IsUnknown():
- aclVal = types.ListUnknown(types.StringType)
- case v.Acl.IsNull():
- aclVal = types.ListNull(types.StringType)
- default:
- var d diag.Diagnostics
- aclVal, d = types.ListValue(types.StringType, v.Acl.Elements())
- diags.Append(d...)
- }
-
- if diags.HasError() {
- return types.ObjectUnknown(map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }), diags
- }
-
- attributeTypes := map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "access_scope": v.AccessScope,
- "acl": aclVal,
- "instance_address": v.InstanceAddress,
- "router_address": v.RouterAddress,
- })
-
- return objVal, diags
-}
-
-func (v NetworkValue) Equal(o attr.Value) bool {
- other, ok := o.(NetworkValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.AccessScope.Equal(other.AccessScope) {
- return false
- }
-
- if !v.Acl.Equal(other.Acl) {
- return false
- }
-
- if !v.InstanceAddress.Equal(other.InstanceAddress) {
- return false
- }
-
- if !v.RouterAddress.Equal(other.RouterAddress) {
- return false
- }
-
- return true
-}
-
-func (v NetworkValue) Type(ctx context.Context) attr.Type {
- return NetworkType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = StorageType{}
-
-type StorageType struct {
- basetypes.ObjectType
-}
-
-func (t StorageType) Equal(o attr.Type) bool {
- other, ok := o.(StorageType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageType) String() string {
- return "StorageType"
-}
-
-func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageValue{
- Class: classVal,
- Size: sizeVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageValueNull() StorageValue {
- return StorageValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageValueUnknown() StorageValue {
- return StorageValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageValue Attribute Value",
- "While creating a StorageValue value, a missing attribute value was detected. "+
- "A StorageValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageValue Attribute Type",
- "While creating a StorageValue value, an invalid attribute value was detected. "+
- "A StorageValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageValue Attribute Value",
- "While creating a StorageValue value, an extra attribute value was detected. "+
- "A StorageValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewStorageValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- if diags.HasError() {
- return NewStorageValueUnknown(), diags
- }
-
- return StorageValue{
- Class: classVal,
- Size: sizeVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue {
- object, diags := NewStorageValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageType) ValueType(ctx context.Context) attr.Value {
- return StorageValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageValue{}
-
-type StorageValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- Size basetypes.Int64Value `tfsdk:"size"`
- state attr.ValueState
-}
-
-func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 2)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 2)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageValue) String() string {
- return "StorageValue"
-}
-
-func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "size": v.Size,
- })
-
- return objVal, diags
-}
-
-func (v StorageValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- return true
-}
-
-func (v StorageValue) Type(ctx context.Context) attr.Type {
- return StorageType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instances_data_source_gen.go
deleted file mode 100644
index 33df0a5d..00000000
--- a/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen/instances_data_source_gen.go
+++ /dev/null
@@ -1,1172 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexalpha
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "instances": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "is_deletable": schema.BoolAttribute{
- Computed: true,
- Description: "Whether the instance can be deleted or not.",
- MarkdownDescription: "Whether the instance can be deleted or not.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the instance.",
- MarkdownDescription: "The name of the instance.",
- },
- "status": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: InstancesType{
- ObjectType: types.ObjectType{
- AttrTypes: InstancesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of owned instances and their current status.",
- MarkdownDescription: "List of owned instances and their current status.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the items to be returned on each page.",
- MarkdownDescription: "Sorting of the items to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "index.desc",
- "index.asc",
- "id.desc",
- "id.asc",
- "is_deletable.desc",
- "is_deletable.asc",
- "name.asc",
- "name.desc",
- "status.asc",
- "status.desc",
- ),
- },
- },
- },
- }
-}
-
-type InstancesModel struct {
- Instances types.List `tfsdk:"instances"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = InstancesType{}
-
-type InstancesType struct {
- basetypes.ObjectType
-}
-
-func (t InstancesType) Equal(o attr.Type) bool {
- other, ok := o.(InstancesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t InstancesType) String() string {
- return "InstancesType"
-}
-
-func (t InstancesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- isDeletableAttribute, ok := attributes["is_deletable"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `is_deletable is missing from object`)
-
- return nil, diags
- }
-
- isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute))
- }
-
- nameAttribute, ok := attributes["name"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `name is missing from object`)
-
- return nil, diags
- }
-
- nameVal, ok := nameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
- }
-
- statusAttribute, ok := attributes["status"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `status is missing from object`)
-
- return nil, diags
- }
-
- statusVal, ok := statusAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return InstancesValue{
- Id: idVal,
- IsDeletable: isDeletableVal,
- Name: nameVal,
- Status: statusVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewInstancesValueNull() InstancesValue {
- return InstancesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewInstancesValueUnknown() InstancesValue {
- return InstancesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewInstancesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (InstancesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing InstancesValue Attribute Value",
- "While creating a InstancesValue value, a missing attribute value was detected. "+
- "A InstancesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid InstancesValue Attribute Type",
- "While creating a InstancesValue value, an invalid attribute value was detected. "+
- "A InstancesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("InstancesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra InstancesValue Attribute Value",
- "While creating a InstancesValue value, an extra attribute value was detected. "+
- "A InstancesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra InstancesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewInstancesValueUnknown(), diags
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- isDeletableAttribute, ok := attributes["is_deletable"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `is_deletable is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute))
- }
-
- nameAttribute, ok := attributes["name"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `name is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- nameVal, ok := nameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
- }
-
- statusAttribute, ok := attributes["status"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `status is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- statusVal, ok := statusAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
- }
-
- if diags.HasError() {
- return NewInstancesValueUnknown(), diags
- }
-
- return InstancesValue{
- Id: idVal,
- IsDeletable: isDeletableVal,
- Name: nameVal,
- Status: statusVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewInstancesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) InstancesValue {
- object, diags := NewInstancesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewInstancesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t InstancesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewInstancesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewInstancesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewInstancesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewInstancesValueMust(InstancesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t InstancesType) ValueType(ctx context.Context) attr.Value {
- return InstancesValue{}
-}
-
-var _ basetypes.ObjectValuable = InstancesValue{}
-
-type InstancesValue struct {
- Id basetypes.StringValue `tfsdk:"id"`
- IsDeletable basetypes.BoolValue `tfsdk:"is_deletable"`
- Name basetypes.StringValue `tfsdk:"name"`
- Status basetypes.StringValue `tfsdk:"status"`
- state attr.ValueState
-}
-
-func (v InstancesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["is_deletable"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.IsDeletable.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["is_deletable"] = val
-
- val, err = v.Name.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["name"] = val
-
- val, err = v.Status.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["status"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v InstancesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v InstancesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v InstancesValue) String() string {
- return "InstancesValue"
-}
-
-func (v InstancesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "id": basetypes.StringType{},
- "is_deletable": basetypes.BoolType{},
- "name": basetypes.StringType{},
- "status": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "id": v.Id,
- "is_deletable": v.IsDeletable,
- "name": v.Name,
- "status": v.Status,
- })
-
- return objVal, diags
-}
-
-func (v InstancesValue) Equal(o attr.Value) bool {
- other, ok := o.(InstancesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.IsDeletable.Equal(other.IsDeletable) {
- return false
- }
-
- if !v.Name.Equal(other.Name) {
- return false
- }
-
- if !v.Status.Equal(other.Status) {
- return false
- }
-
- return true
-}
-
-func (v InstancesValue) Type(ctx context.Context) attr.Type {
- return InstancesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v InstancesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "id": basetypes.StringType{},
- "is_deletable": basetypes.BoolType{},
- "name": basetypes.StringType{},
- "status": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/functions.go b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
index 1ad001b4..b451eb70 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/functions.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/functions.go
@@ -1,277 +1,281 @@
-package sqlserverflexalpha
+package sqlserverflex
import (
"context"
- "errors"
"fmt"
"math"
"github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/types"
-
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- sqlserverflexalphaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/datasources_gen"
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
-func mapResponseToModel(
+func mapFields(
ctx context.Context,
- resp *sqlserverflexalpha.GetInstanceResponse,
- m *sqlserverflexalphaResGen.InstanceModel,
- tfDiags diag.Diagnostics,
+ resp *sqlserverflex.GetInstanceResponse,
+ model *Model,
+ storage *storageModel,
+ encryption *encryptionModel,
+ network *networkModel,
+ region string,
) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleEncryption(m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
+ if resp == nil {
+ return fmt.Errorf("response input is nil")
}
- net, diags := sqlserverflexalphaResGen.NewNetworkValue(
- sqlserverflexalphaResGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
+ if model == nil {
+ return fmt.Errorf("model input is nil")
}
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
- m.Status = types.StringValue(string(resp.GetStatus()))
+ instance := resp
- stor, diags := sqlserverflexalphaResGen.NewStorageValue(
- sqlserverflexalphaResGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
+ var instanceId string
+ if model.InstanceId.ValueString() != "" {
+ instanceId = model.InstanceId.ValueString()
+ } else if instance.Id != nil {
+ instanceId = *instance.Id
+ } else {
+ return fmt.Errorf("instance id not present")
}
- m.Storage = stor
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func mapDataResponseToModel(
- ctx context.Context,
- resp *sqlserverflexalpha.GetInstanceResponse,
- m *dataSourceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleDSEncryption(m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexalphaDataGen.NewNetworkValue(
- sqlserverflexalphaDataGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexalphaDataGen.NewStorageValue(
- sqlserverflexalphaDataGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func handleEncryption(
- m *sqlserverflexalphaResGen.InstanceModel,
- resp *sqlserverflexalpha.GetInstanceResponse,
-) sqlserverflexalphaResGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == "" ||
- resp.Encryption.KekKeyRingId == "" ||
- resp.Encryption.KekKeyVersion == "" ||
- resp.Encryption.ServiceAccount == "" {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexalphaResGen.NewEncryptionValueNull()
+ var storageValues map[string]attr.Value
+ if instance.Storage == nil {
+ storageValues = map[string]attr.Value{
+ "class": storage.Class,
+ "size": storage.Size,
}
- return m.Encryption
- }
-
- enc := sqlserverflexalphaResGen.NewEncryptionValueNull()
- if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- enc.KekKeyId = types.StringValue(*kVal)
- }
- if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- enc.KekKeyRingId = types.StringValue(*kkVal)
- }
- if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- enc.KekKeyVersion = types.StringValue(*kkvVal)
- }
- if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
- enc.ServiceAccount = types.StringValue(*sa)
- }
- return enc
-}
-
-func handleDSEncryption(
- m *dataSourceModel,
- resp *sqlserverflexalpha.GetInstanceResponse,
-) sqlserverflexalphaDataGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == "" ||
- resp.Encryption.KekKeyRingId == "" ||
- resp.Encryption.KekKeyVersion == "" ||
- resp.Encryption.ServiceAccount == "" {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexalphaDataGen.NewEncryptionValueNull()
+ } else {
+ storageValues = map[string]attr.Value{
+ "class": types.StringValue(*instance.Storage.Class),
+ "size": types.Int64PointerValue(instance.Storage.Size),
}
- return m.Encryption
+ }
+ storageObject, diags := types.ObjectValue(storageTypes, storageValues)
+ if diags.HasError() {
+ return fmt.Errorf("creating storage: %w", core.DiagsToError(diags))
}
- enc := sqlserverflexalphaDataGen.NewEncryptionValueNull()
- if kVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
- enc.KekKeyId = types.StringValue(*kVal)
+ var encryptionValues map[string]attr.Value
+ if instance.Encryption == nil {
+ encryptionValues = map[string]attr.Value{
+ "keyring_id": encryption.KeyRingId,
+ "key_id": encryption.KeyId,
+ "key_version": encryption.KeyVersion,
+ "service_account": encryption.ServiceAccount,
+ }
+ } else {
+ encryptionValues = map[string]attr.Value{
+ "keyring_id": types.StringValue(*instance.Encryption.KekKeyRingId),
+ "key_id": types.StringValue(*instance.Encryption.KekKeyId),
+ "key_version": types.StringValue(*instance.Encryption.KekKeyVersion),
+ "service_account": types.StringValue(*instance.Encryption.ServiceAccount),
+ }
}
- if kkVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
- enc.KekKeyRingId = types.StringValue(*kkVal)
+ encryptionObject, diags := types.ObjectValue(encryptionTypes, encryptionValues)
+ if diags.HasError() {
+ return fmt.Errorf("creating encryption: %w", core.DiagsToError(diags))
}
- if kkvVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
- enc.KekKeyVersion = types.StringValue(*kkvVal)
+
+ var networkValues map[string]attr.Value
+ if instance.Network == nil {
+ networkValues = map[string]attr.Value{
+ "acl": network.ACL,
+ "access_scope": network.AccessScope,
+ "instance_address": network.InstanceAddress,
+ "router_address": network.RouterAddress,
+ }
+ } else {
+ aclList, diags := types.ListValueFrom(ctx, types.StringType, *instance.Network.Acl)
+ if diags.HasError() {
+ return fmt.Errorf("creating network (acl list): %w", core.DiagsToError(diags))
+ }
+
+ var routerAddress string
+ if instance.Network.RouterAddress != nil {
+ routerAddress = *instance.Network.RouterAddress
+ diags.AddWarning("field missing while mapping fields", "router_address was empty in API response")
+ }
+ if instance.Network.InstanceAddress == nil {
+ return fmt.Errorf("creating network: no instance address returned")
+ }
+ networkValues = map[string]attr.Value{
+ "acl": aclList,
+ "access_scope": types.StringValue(string(*instance.Network.AccessScope)),
+ "instance_address": types.StringValue(*instance.Network.InstanceAddress),
+ "router_address": types.StringValue(routerAddress),
+ }
}
- if sa, ok := resp.Encryption.GetServiceAccountOk(); ok {
- enc.ServiceAccount = types.StringValue(*sa)
+ networkObject, diags := types.ObjectValue(networkTypes, networkValues)
+ if diags.HasError() {
+ return fmt.Errorf("creating network: %w", core.DiagsToError(diags))
}
- return enc
+
+ simplifiedModelBackupSchedule := utils.SimplifyBackupSchedule(model.BackupSchedule.ValueString())
+ // If the value returned by the API is different from the one in the model after simplification,
+ // we update the model so that it causes an error in Terraform
+ if simplifiedModelBackupSchedule != types.StringPointerValue(instance.BackupSchedule).ValueString() {
+ model.BackupSchedule = types.StringPointerValue(instance.BackupSchedule)
+ }
+
+ if instance.Replicas == nil {
+ return fmt.Errorf("instance has no replicas set")
+ }
+
+ if instance.RetentionDays == nil {
+ return fmt.Errorf("instance has no retention days set")
+ }
+
+ if instance.Version == nil {
+ return fmt.Errorf("instance has no version set")
+ }
+
+ if instance.Edition == nil {
+ return fmt.Errorf("instance has no edition set")
+ }
+
+ if instance.Status == nil {
+ return fmt.Errorf("instance has no status set")
+ }
+
+ if instance.IsDeletable == nil {
+ return fmt.Errorf("instance has no IsDeletable set")
+ }
+
+ model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, instanceId)
+ model.InstanceId = types.StringValue(instanceId)
+ model.Name = types.StringPointerValue(instance.Name)
+ model.FlavorId = types.StringPointerValue(instance.FlavorId)
+ model.Replicas = types.Int64Value(int64(*instance.Replicas))
+ model.Storage = storageObject
+ model.Version = types.StringValue(string(*instance.Version))
+ model.Edition = types.StringValue(string(*instance.Edition))
+ model.Region = types.StringValue(region)
+ model.Encryption = encryptionObject
+ model.Network = networkObject
+ model.RetentionDays = types.Int64Value(*instance.RetentionDays)
+ model.Status = types.StringValue(string(*instance.Status))
+ model.IsDeletable = types.BoolValue(*instance.IsDeletable)
+ return nil
}
func toCreatePayload(
- ctx context.Context,
- model *sqlserverflexalphaResGen.InstanceModel,
-) (*sqlserverflexalpha.CreateInstanceRequestPayload, error) {
+ model *Model,
+ storage *storageModel,
+ encryption *encryptionModel,
+ network *networkModel,
+) (*sqlserverflex.CreateInstanceRequestPayload, error) {
if model == nil {
return nil, fmt.Errorf("nil model")
}
- storagePayload := sqlserverflexalpha.StorageCreate{}
- if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
- storagePayload.Class = model.Storage.Class.ValueString()
- storagePayload.Size = model.Storage.Size.ValueInt64()
+ storagePayload := &sqlserverflex.CreateInstanceRequestPayloadGetStorageArgType{}
+ if storage != nil {
+ storagePayload.Class = conversion.StringValueToPointer(storage.Class)
+ storagePayload.Size = conversion.Int64ValueToPointer(storage.Size)
}
- var encryptionPayload *sqlserverflexalpha.InstanceEncryption = nil
- if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() &&
- !model.Encryption.KekKeyId.IsNull() && model.Encryption.KekKeyId.IsUnknown() && model.Encryption.KekKeyId.ValueString() != "" &&
- !model.Encryption.KekKeyRingId.IsNull() && !model.Encryption.KekKeyRingId.IsUnknown() && model.Encryption.KekKeyRingId.ValueString() != "" &&
- !model.Encryption.KekKeyVersion.IsNull() && !model.Encryption.KekKeyVersion.IsUnknown() && model.Encryption.KekKeyVersion.ValueString() != "" &&
- !model.Encryption.ServiceAccount.IsNull() && !model.Encryption.ServiceAccount.IsUnknown() && model.Encryption.ServiceAccount.ValueString() != "" {
- encryptionPayload = &sqlserverflexalpha.InstanceEncryption{
- KekKeyId: model.Encryption.KekKeyId.ValueString(),
- KekKeyRingId: model.Encryption.KekKeyVersion.ValueString(),
- KekKeyVersion: model.Encryption.KekKeyRingId.ValueString(),
- ServiceAccount: model.Encryption.ServiceAccount.ValueString(),
+ var encryptionPayload *sqlserverflex.CreateInstanceRequestPayloadGetEncryptionArgType
+ if encryption != nil &&
+ !encryption.KeyId.IsNull() && !encryption.KeyId.IsUnknown() &&
+ !encryption.KeyRingId.IsNull() && !encryption.KeyRingId.IsUnknown() &&
+ !encryption.KeyVersion.IsNull() && !encryption.KeyVersion.IsUnknown() &&
+ !encryption.ServiceAccount.IsNull() && !encryption.ServiceAccount.IsUnknown() {
+ encryptionPayload = &sqlserverflex.CreateInstanceRequestPayloadGetEncryptionArgType{
+ KekKeyId: conversion.StringValueToPointer(encryption.KeyId),
+ KekKeyRingId: conversion.StringValueToPointer(encryption.KeyVersion),
+ KekKeyVersion: conversion.StringValueToPointer(encryption.KeyRingId),
+ ServiceAccount: conversion.StringValueToPointer(encryption.ServiceAccount),
}
}
- networkPayload := sqlserverflexalpha.CreateInstanceRequestPayloadNetwork{}
- if !model.Network.IsNull() && !model.Network.IsUnknown() {
- networkPayload.AccessScope = (*sqlserverflexalpha.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer())
-
- var resList []string
- diags := model.Network.Acl.ElementsAs(ctx, &resList, false)
+ var aclElements []string
+ if network != nil && !network.ACL.IsNull() && !network.ACL.IsUnknown() {
+ aclElements = make([]string, 0, len(network.ACL.Elements()))
+ diags := network.ACL.ElementsAs(context.TODO(), &aclElements, false)
if diags.HasError() {
- return nil, fmt.Errorf("error converting network acl list")
+ return nil, fmt.Errorf("creating network: %w", core.DiagsToError(diags))
}
- networkPayload.Acl = resList
}
- return &sqlserverflexalpha.CreateInstanceRequestPayload{
- BackupSchedule: model.BackupSchedule.ValueString(),
+ networkPayload := &sqlserverflex.CreateInstanceRequestPayloadGetNetworkArgType{}
+ if network != nil {
+ networkPayload.AccessScope = sqlserverflex.CreateInstanceRequestPayloadNetworkGetAccessScopeAttributeType(conversion.StringValueToPointer(network.AccessScope))
+ networkPayload.Acl = &aclElements
+ }
+
+ return &sqlserverflex.CreateInstanceRequestPayload{
+ BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule),
Encryption: encryptionPayload,
- FlavorId: model.FlavorId.ValueString(),
- Name: model.Name.ValueString(),
+ FlavorId: conversion.StringValueToPointer(model.FlavorId),
+ Name: conversion.StringValueToPointer(model.Name),
Network: networkPayload,
- RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // TODO
+ RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays),
Storage: storagePayload,
- Version: sqlserverflexalpha.InstanceVersion(model.Version.ValueString()),
+ Version: sqlserverflex.CreateInstanceRequestPayloadGetVersionAttributeType(conversion.StringValueToPointer(model.Version)),
}, nil
}
-func toUpdatePayload(
- ctx context.Context,
- m *sqlserverflexalphaResGen.InstanceModel,
- resp *resource.UpdateResponse,
-) (*sqlserverflexalpha.UpdateInstanceRequestPayload, error) {
- if m == nil {
+//nolint:unused // TODO: remove if not needed later
+func toUpdatePartiallyPayload(
+ model *Model,
+ storage *storageModel,
+ network *networkModel,
+) (*sqlserverflex.UpdateInstancePartiallyRequestPayload, error) {
+ if model == nil {
return nil, fmt.Errorf("nil model")
}
- if m.Replicas.ValueInt64() > math.MaxUint32 {
- return nil, fmt.Errorf("replicas value is too big for uint32")
- }
- replVal := sqlserverflexalpha.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above
- var netAcl []string
- diags := m.Network.Acl.ElementsAs(ctx, &netAcl, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting model network acl value")
+ storagePayload := &sqlserverflex.UpdateInstanceRequestPayloadGetStorageArgType{}
+ if storage != nil {
+ storagePayload.Size = conversion.Int64ValueToPointer(storage.Size)
}
- return &sqlserverflexalpha.UpdateInstanceRequestPayload{
- BackupSchedule: m.BackupSchedule.ValueString(),
- FlavorId: m.FlavorId.ValueString(),
- Name: m.Name.ValueString(),
- Network: sqlserverflexalpha.UpdateInstanceRequestPayloadNetwork{Acl: netAcl},
- Replicas: replVal,
- RetentionDays: int32(m.RetentionDays.ValueInt64()), //nolint:gosec // TODO
- Storage: sqlserverflexalpha.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()},
- Version: sqlserverflexalpha.InstanceVersion(m.Version.ValueString()),
+
+ var aclElements []string
+ if network != nil && !network.ACL.IsNull() && !network.ACL.IsUnknown() {
+ aclElements = make([]string, 0, len(network.ACL.Elements()))
+ diags := network.ACL.ElementsAs(context.TODO(), &aclElements, false)
+ if diags.HasError() {
+ return nil, fmt.Errorf("creating network: %w", core.DiagsToError(diags))
+ }
+ }
+
+ networkPayload := &sqlserverflex.UpdateInstancePartiallyRequestPayloadGetNetworkArgType{}
+ if network != nil {
+ networkPayload.AccessScope = sqlserverflex.UpdateInstancePartiallyRequestPayloadNetworkGetAccessScopeAttributeType(conversion.StringValueToPointer(network.AccessScope))
+ networkPayload.Acl = &aclElements
+ }
+
+ if model.Replicas.ValueInt64() > math.MaxInt32 {
+ return nil, fmt.Errorf("replica count too big: %d", model.Replicas.ValueInt64())
+ }
+ replCount := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
+ return &sqlserverflex.UpdateInstancePartiallyRequestPayload{
+ BackupSchedule: conversion.StringValueToPointer(model.BackupSchedule),
+ FlavorId: conversion.StringValueToPointer(model.FlavorId),
+ Name: conversion.StringValueToPointer(model.Name),
+ Network: networkPayload,
+ Replicas: sqlserverflex.UpdateInstancePartiallyRequestPayloadGetReplicasAttributeType(&replCount),
+ RetentionDays: conversion.Int64ValueToPointer(model.RetentionDays),
+ Storage: storagePayload,
+ Version: sqlserverflex.UpdateInstancePartiallyRequestPayloadGetVersionAttributeType(conversion.StringValueToPointer(model.Version)),
+ }, nil
+}
+
+// TODO: check func with his args
+func toUpdatePayload(
+ _ *Model,
+ _ *storageModel,
+ _ *networkModel,
+) (*sqlserverflex.UpdateInstanceRequestPayload, error) {
+ return &sqlserverflex.UpdateInstanceRequestPayload{
+ BackupSchedule: nil,
+ FlavorId: nil,
+ Name: nil,
+ Network: nil,
+ Replicas: nil,
+ RetentionDays: nil,
+ Storage: nil,
+ Version: nil,
}, nil
}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml
index 71d4cbe4..2e72ba16 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml
+++ b/stackit/internal/services/sqlserverflexalpha/instance/planModifiers.yaml
@@ -21,6 +21,7 @@ fields:
- name: 'name'
modifiers:
- 'UseStateForUnknown'
+ - 'RequiresReplace'
- name: 'backup_schedule'
modifiers:
@@ -30,28 +31,24 @@ fields:
validators:
- validate.NoSeparator
modifiers:
- - 'UseStateForUnknown'
- 'RequiresReplace'
- name: 'encryption.kek_key_version'
validators:
- validate.NoSeparator
modifiers:
- - 'UseStateForUnknown'
- 'RequiresReplace'
- name: 'encryption.kek_key_ring_id'
validators:
- validate.NoSeparator
modifiers:
- - 'UseStateForUnknown'
- 'RequiresReplace'
- name: 'encryption.service_account'
validators:
- validate.NoSeparator
modifiers:
- - 'UseStateForUnknown'
- 'RequiresReplace'
- name: 'network.access_scope'
@@ -79,7 +76,6 @@ fields:
- name: 'region'
modifiers:
- - 'UseStateForUnknown'
- 'RequiresReplace'
- name: 'retention_days'
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource.go b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
index f40cc3f4..092805f3 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resource.go
@@ -1,106 +1,132 @@
-package sqlserverflexalpha
+// Copyright (c) STACKIT
+
+package sqlserverflex
import (
"context"
- _ "embed"
"fmt"
"net/http"
+ "regexp"
"strings"
"time"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
-
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance/resources_gen"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+ wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
+
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
)
+// Ensure the implementation satisfies the expected interfaces.
var (
_ resource.Resource = &instanceResource{}
_ resource.ResourceWithConfigure = &instanceResource{}
_ resource.ResourceWithImportState = &instanceResource{}
_ resource.ResourceWithModifyPlan = &instanceResource{}
- _ resource.ResourceWithIdentity = &instanceResource{}
)
+//nolint:unused // TODO: remove if not needed later
+var validNodeTypes []string = []string{
+ "Single",
+ "Replica",
+}
+
+type Model struct {
+ Id types.String `tfsdk:"id"` // needed by TF
+ InstanceId types.String `tfsdk:"instance_id"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Name types.String `tfsdk:"name"`
+ BackupSchedule types.String `tfsdk:"backup_schedule"`
+ FlavorId types.String `tfsdk:"flavor_id"`
+ Encryption types.Object `tfsdk:"encryption"`
+ IsDeletable types.Bool `tfsdk:"is_deletable"`
+ Storage types.Object `tfsdk:"storage"`
+ Status types.String `tfsdk:"status"`
+ Version types.String `tfsdk:"version"`
+ Replicas types.Int64 `tfsdk:"replicas"`
+ Region types.String `tfsdk:"region"`
+ Network types.Object `tfsdk:"network"`
+ Edition types.String `tfsdk:"edition"`
+ RetentionDays types.Int64 `tfsdk:"retention_days"`
+}
+
+type encryptionModel struct {
+ KeyRingId types.String `tfsdk:"keyring_id"`
+ KeyId types.String `tfsdk:"key_id"`
+ KeyVersion types.String `tfsdk:"key_version"`
+ ServiceAccount types.String `tfsdk:"service_account"`
+}
+
+var encryptionTypes = map[string]attr.Type{
+ "keyring_id": basetypes.StringType{},
+ "key_id": basetypes.StringType{},
+ "key_version": basetypes.StringType{},
+ "service_account": basetypes.StringType{},
+}
+
+type networkModel struct {
+ ACL types.List `tfsdk:"acl"`
+ AccessScope types.String `tfsdk:"access_scope"`
+ InstanceAddress types.String `tfsdk:"instance_address"`
+ RouterAddress types.String `tfsdk:"router_address"`
+}
+
+var networkTypes = map[string]attr.Type{
+ "acl": basetypes.ListType{ElemType: types.StringType},
+ "access_scope": basetypes.StringType{},
+ "instance_address": basetypes.StringType{},
+ "router_address": basetypes.StringType{},
+}
+
+// Struct corresponding to Model.Storage
+type storageModel struct {
+ Class types.String `tfsdk:"class"`
+ Size types.Int64 `tfsdk:"size"`
+}
+
+// Types corresponding to storageModel
+var storageTypes = map[string]attr.Type{
+ "class": basetypes.StringType{},
+ "size": basetypes.Int64Type{},
+}
+
+// NewInstanceResource is a helper function to simplify the provider implementation.
func NewInstanceResource() resource.Resource {
return &instanceResource{}
}
+// instanceResource is the resource implementation.
type instanceResource struct {
client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexalphaResGen.InstanceModel
-
-type InstanceResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
-}
-
-func (r *instanceResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
+// Metadata returns the resource type name.
+func (r *instanceResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_instance"
}
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *instanceResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
// Configure adds the provider configured client to the resource.
func (r *instanceResource) Configure(
ctx context.Context,
@@ -113,31 +139,12 @@ func (r *instanceResource) Configure(
return
}
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := sqlserverflexalpha.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
return
}
r.client = apiClient
- tflog.Info(ctx, "sqlserverflexalpha.Instance client configured")
+ tflog.Info(ctx, "SQLServer Flex instance client configured")
}
// ModifyPlan implements resource.ResourceWithModifyPlan.
@@ -147,20 +154,17 @@ func (r *instanceResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
+ var configModel Model
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
}
- var configModel resourceModel
resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
if resp.Diagnostics.HasError() {
return
}
- if req.Plan.Raw.IsNull() {
- return
- }
- var planModel resourceModel
+ var planModel Model
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -177,139 +181,444 @@ func (r *instanceResource) ModifyPlan(
}
}
-func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- crateErr := "[SQL Server Flex Alpha - Create] error"
+// Schema defines the schema for the resource.
+func (r *instanceResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "SQLServer Flex ALPHA instance resource schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".",
+ "instance_id": "ID of the SQLServer Flex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "name": "Instance name.",
+ "access_scope": "The access scope of the instance. (SNA | PUBLIC)",
+ "flavor_id": "The flavor ID of the instance.",
+ "acl": "The Access Control List (ACL) for the SQLServer Flex instance.",
+ "backup_schedule": `The backup schedule. Should follow the cron scheduling system format (e.g. "0 0 * * *")`,
+ "region": "The resource region. If not defined, the provider region is used.",
+ "encryption": "The encryption block.",
+ "replicas": "The number of replicas of the SQLServer Flex instance.",
+ "network": "The network block.",
+ "keyring_id": "STACKIT KMS - KeyRing ID of the encryption key to use.",
+ "key_id": "STACKIT KMS - Key ID of the encryption key to use.",
+ "key_version": "STACKIT KMS - Key version to use in the encryption key.",
+		"service_account":  "STACKIT KMS - service account to use in the encryption key.",
+ "instance_address": "The returned instance IP address of the SQLServer Flex instance.",
+ "router_address": "The returned router IP address of the SQLServer Flex instance.",
+ }
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "name": schema.StringAttribute{
+ Description: descriptions["name"],
+ Required: true,
+ Validators: []validator.String{
+ stringvalidator.LengthAtLeast(1),
+ stringvalidator.RegexMatches(
+ regexp.MustCompile("^[a-z]([-a-z0-9]*[a-z0-9])?$"),
+ "must start with a letter, must have lower case letters, numbers or hyphens, and no hyphen at the end",
+ ),
+ },
+ },
+ "backup_schedule": schema.StringAttribute{
+ Description: descriptions["backup_schedule"],
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "is_deletable": schema.BoolAttribute{
+ Description: descriptions["is_deletable"],
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Bool{
+ boolplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "flavor_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Required: true,
+ },
+ "replicas": schema.Int64Attribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ "storage": schema.SingleNestedAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Object{
+ objectplanmodifier.UseStateForUnknown(),
+ },
+ Attributes: map[string]schema.Attribute{
+ "class": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "size": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ },
+ },
+ "version": schema.StringAttribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "edition": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "retention_days": schema.Int64Attribute{
+ Optional: true,
+ Computed: true,
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ // must be computed to allow for storing the override value from the provider
+ Computed: true,
+ Description: descriptions["region"],
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ },
+ "status": schema.StringAttribute{
+ Optional: true,
+ // must be computed to allow for storing the override value from the provider
+ Computed: true,
+ Description: descriptions["status"],
+ },
+ "encryption": schema.SingleNestedAttribute{
+ Optional: true,
+ PlanModifiers: []planmodifier.Object{
+ objectplanmodifier.RequiresReplace(),
+ objectplanmodifier.UseStateForUnknown(),
+ },
+ Attributes: map[string]schema.Attribute{
+ "key_id": schema.StringAttribute{
+ Description: descriptions["key_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ validate.NoSeparator(),
+ },
+ },
+ "key_version": schema.StringAttribute{
+ Description: descriptions["key_version"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ validate.NoSeparator(),
+ },
+ },
+ "keyring_id": schema.StringAttribute{
+ Description: descriptions["keyring_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ validate.NoSeparator(),
+ },
+ },
+ "service_account": schema.StringAttribute{
+ Description: descriptions["service_account"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Validators: []validator.String{
+ validate.NoSeparator(),
+ },
+ },
+ },
+ Description: descriptions["encryption"],
+ },
+ "network": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "access_scope": schema.StringAttribute{
+ Description: descriptions["access_scope"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.NoSeparator(),
+ },
+ },
+ "acl": schema.ListAttribute{
+ Description: descriptions["acl"],
+ ElementType: types.StringType,
+ Required: true,
+ PlanModifiers: []planmodifier.List{
+ listplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "instance_address": schema.StringAttribute{
+ Description: descriptions["instance_address"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "router_address": schema.StringAttribute{
+ Description: descriptions["router_address"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ },
+ Description: descriptions["network"],
+ },
+ },
+ }
+}
+// Create creates the resource and sets the initial Terraform state.
+func (r *instanceResource) Create(
+ ctx context.Context,
+ req resource.CreateRequest,
+ resp *resource.CreateResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model Model
+ diags := req.Plan.Get(ctx, &model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx)
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
+ projectId := model.ProjectId.ValueString()
+ region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region)
+ var storage = &storageModel{}
+ if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
+ diags = model.Storage.As(ctx, storage, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ var encryption = &encryptionModel{}
+ if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
+ diags = model.Encryption.As(ctx, encryption, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ var network = &networkModel{}
+ if !model.Network.IsNull() && !model.Network.IsUnknown() {
+ diags = model.Network.As(ctx, network, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
// Generate API request body from model
- payload, err := toCreatePayload(ctx, &data)
+ payload, err := toCreatePayload(&model, storage, encryption, network)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- crateErr,
+ "Error creating instance",
fmt.Sprintf("Creating API payload: %v", err),
)
return
}
- // Create new Instance
- createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
+ // Create new instance
+ createResp, err := r.client.CreateInstanceRequest(
ctx,
projectId,
region,
).CreateInstanceRequestPayload(*payload).Execute()
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, crateErr, fmt.Sprintf("Calling API: %v", err))
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating instance", fmt.Sprintf("Calling API: %v", err))
return
}
ctx = core.LogResponse(ctx)
- instanceId := createResp.Id
-
- // Example data value setting
- data.InstanceId = types.StringValue("id-from-response")
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ instanceId := *createResp.Id
+ utils.SetAndLogStateFields(
+ ctx, &resp.Diagnostics, &resp.State, map[string]any{
+ "id": utils.BuildInternalTerraformId(projectId, region, instanceId),
+ "instance_id": instanceId,
+ },
+ )
if resp.Diagnostics.HasError() {
return
}
+ // The creation waiter sometimes returns an error from the API: "instance with id xxx has unexpected status Failure"
+ // which can be avoided by sleeping before wait
waitResp, err := wait.CreateInstanceWaitHandler(
ctx,
- r.client.DefaultAPI,
+ r.client,
projectId,
instanceId,
region,
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
+ ).SetSleepBeforeWait(30 * time.Second).WaitWithContext(ctx)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- crateErr,
+ "Error creating instance",
fmt.Sprintf("Instance creation waiting: %v", err),
)
return
}
- if waitResp.Id == "" {
+ if waitResp.FlavorId == nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- crateErr,
- "Instance creation waiting: returned id is nil",
+ "Error creating instance",
+ "Instance creation waiting: returned flavor id is nil",
)
return
}
// Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
+ err = mapFields(ctx, waitResp, &model, storage, encryption, network, region)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- crateErr,
- fmt.Sprintf("processing API payload: %v", err),
+ "Error creating instance",
+ fmt.Sprintf("Processing API payload: %v", err),
)
return
}
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance created")
-}
-
-func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+ // Set state to fully populated data
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+ // After the instance creation, database might not be ready to accept connections immediately.
+ // That is why we add a sleep
+	// TODO: check whether this fixed sleep can be removed or replaced with a readiness probe
+ time.Sleep(120 * time.Second)
+
+ tflog.Info(ctx, "SQLServer Flex instance created")
+}
+
+// Read refreshes the Terraform state with the latest data.
+func (r *instanceResource) Read(
+ ctx context.Context,
+ req resource.ReadRequest,
+ resp *resource.ReadResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model Model
+ diags := req.State.Get(ctx, &model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx)
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
+ region := r.providerData.GetRegionWithOverride(model.Region)
+
ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
+ var storage = &storageModel{}
+ if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
+ diags = model.Storage.As(ctx, storage, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
- instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ var encryption = &encryptionModel{}
+ if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
+ diags = model.Encryption.As(ctx, encryption, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ var network = &networkModel{}
+ if !model.Network.IsNull() && !model.Network.IsUnknown() {
+ diags = model.Network.As(ctx, network, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if ok && oapiErr.StatusCode == http.StatusNotFound {
@@ -323,7 +632,7 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r
ctx = core.LogResponse(ctx)
// Map response body to schema
- err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
+ err = mapFields(ctx, instanceResp, &model, storage, encryption, network, region)
if err != nil {
core.LogAndAddError(
ctx,
@@ -333,145 +642,146 @@ func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, r
)
return
}
-
- // Save identity into Terraform state
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ // Set refreshed state
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance read")
+ tflog.Info(ctx, "SQLServer Flex instance read")
}
-func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data resourceModel
- updateInstanceError := "Error updating instance"
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+// Update updates the resource and sets the updated Terraform state on success.
+func (r *instanceResource) Update(
+ ctx context.Context,
+ req resource.UpdateRequest,
+ resp *resource.UpdateResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ // Retrieve values from plan
+ var model Model
+ diags := req.Plan.Get(ctx, &model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx)
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
+ region := model.Region.ValueString()
+
ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
+ var storage = &storageModel{}
+ if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
+ diags = model.Storage.As(ctx, storage, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ var encryption = &encryptionModel{}
+ if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
+ diags = model.Encryption.As(ctx, encryption, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
+
+ var network = &networkModel{}
+ if !model.Network.IsNull() && !model.Network.IsUnknown() {
+ diags = model.Network.As(ctx, network, basetypes.ObjectAsOptions{})
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ }
// Generate API request body from model
- payload, err := toUpdatePayload(ctx, &data, resp)
+ payload, err := toUpdatePayload(&model, storage, network)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- updateInstanceError,
+ "Error updating instance",
fmt.Sprintf("Creating API payload: %v", err),
)
return
}
// Update existing instance
- err = r.client.DefaultAPI.UpdateInstanceRequest(
+ err = r.client.UpdateInstanceRequest(
ctx,
projectId,
region,
instanceId,
).UpdateInstanceRequestPayload(*payload).Execute()
if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error())
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error())
return
}
ctx = core.LogResponse(ctx)
- waitResp, err := wait.
- UpdateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region).
- SetSleepBeforeWait(15 * time.Second).
- SetTimeout(45 * time.Minute).
- WaitWithContext(ctx)
+ waitResp, err := wait.UpdateInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- updateInstanceError,
+ "Error updating instance",
fmt.Sprintf("Instance update waiting: %v", err),
)
return
}
// Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
+ err = mapFields(ctx, waitResp, &model, storage, encryption, network, region)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- updateInstanceError,
+ "Error updating instance",
fmt.Sprintf("Processing API payload: %v", err),
)
return
}
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
+ diags = resp.State.Set(ctx, model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance updated")
+ tflog.Info(ctx, "SQLServer Flex instance updated")
}
-func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
+// Delete deletes the resource and removes the Terraform state on success.
+func (r *instanceResource) Delete(
+ ctx context.Context,
+ req resource.DeleteRequest,
+ resp *resource.DeleteResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ // Retrieve values from state
+ var model Model
+ diags := req.State.Get(ctx, &model)
+ resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx)
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
+ region := model.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- instanceId := identityData.InstanceID.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
// Delete existing instance
- err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
return
@@ -479,7 +789,7 @@ func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteReques
ctx = core.LogResponse(ctx)
- delResp, err := wait.DeleteInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region).WaitWithContext(ctx)
+ _, err = wait.DeleteInstanceWaitHandler(ctx, r.client, projectId, instanceId, region).WaitWithContext(ctx)
if err != nil {
core.LogAndAddError(
ctx,
@@ -489,66 +799,29 @@ func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteReques
)
return
}
-
- if delResp != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- "wait handler returned non nil result",
- )
- return
- }
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexalpha.Instance deleted")
+ tflog.Info(ctx, "SQLServer Flex instance deleted")
}
// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
+// The expected format of the resource import identifier is: project_id,instance_id
func (r *instanceResource) ImportState(
ctx context.Context,
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- ctx = core.InitProviderContext(ctx)
+ idParts := strings.Split(req.ID, core.Separator)
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing instance",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing instance",
+ fmt.Sprintf("Expected import identifier with format: [project_id],[region],[instance_id] Got: %q", req.ID),
+ )
return
}
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
-
- tflog.Info(ctx, "sqlserverflexalpha instance state imported")
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ tflog.Info(ctx, "SQLServer Flex instance state imported")
}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource_msh_test.go.bak b/stackit/internal/services/sqlserverflexalpha/instance/resource_msh_test.go.bak
new file mode 100644
index 00000000..7a968fe5
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resource_msh_test.go.bak
@@ -0,0 +1,280 @@
+package sqlserverflex
+
+import (
+ "context"
+ "reflect"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+)
+
+func TestNewInstanceResource(t *testing.T) {
+ tests := []struct {
+ name string
+ want resource.Resource
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := NewInstanceResource(); !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("NewInstanceResource() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func Test_instanceResource_Configure(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.ConfigureRequest
+ resp *resource.ConfigureResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Configure(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_Create(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.CreateRequest
+ resp *resource.CreateResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Create(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_Delete(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.DeleteRequest
+ resp *resource.DeleteResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Delete(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_ImportState(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.ImportStateRequest
+ resp *resource.ImportStateResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.ImportState(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_Metadata(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ in0 context.Context
+ req resource.MetadataRequest
+ resp *resource.MetadataResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Metadata(tt.args.in0, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_ModifyPlan(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.ModifyPlanRequest
+ resp *resource.ModifyPlanResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.ModifyPlan(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_Read(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.ReadRequest
+ resp *resource.ReadResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Read(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_Schema(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ in0 context.Context
+ in1 resource.SchemaRequest
+ resp *resource.SchemaResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Schema(tt.args.in0, tt.args.in1, tt.args.resp)
+ })
+ }
+}
+
+func Test_instanceResource_Update(t *testing.T) {
+ type fields struct {
+ client *sqlserverflex.APIClient
+ providerData core.ProviderData
+ }
+ type args struct {
+ ctx context.Context
+ req resource.UpdateRequest
+ resp *resource.UpdateResponse
+ }
+ tests := []struct {
+ name string
+ fields fields
+ args args
+ }{
+ // TODO: Add test cases.
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r := &instanceResource{
+ client: tt.fields.client,
+ providerData: tt.fields.providerData,
+ }
+ r.Update(tt.args.ctx, tt.args.req, tt.args.resp)
+ })
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go
new file mode 100644
index 00000000..7768f1e9
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resource_test.go
@@ -0,0 +1,837 @@
+// Copyright (c) STACKIT
+
+package sqlserverflex
+
+import (
+ "context"
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+)
+
+// type sqlserverflexClientMocked struct {
+// returnError bool
+// listFlavorsResp *sqlserverflex.GetFlavorsResponse
+// }
+//
+// func (c *sqlserverflexClientMocked) GetFlavorsExecute(_ context.Context, _, _ string) (*sqlserverflex.GetFlavorsResponse, error) {
+// if c.returnError {
+// return nil, fmt.Errorf("get flavors failed")
+// }
+//
+// return c.listFlavorsResp, nil
+// }
+
+func TestMapFields(t *testing.T) {
+ t.Skip("Skipping - needs refactoring")
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ state Model
+ input *sqlserverflex.GetInstanceResponse
+ storage *storageModel
+ encryption *encryptionModel
+ network *networkModel
+ region string
+ expected Model
+ isValid bool
+ }{
+ {
+ "default_values",
+ Model{
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Replicas: types.Int64Value(1),
+ RetentionDays: types.Int64Value(1),
+ Version: types.StringValue("v1"),
+ Edition: types.StringValue("edition 1"),
+ Status: types.StringValue("status"),
+ IsDeletable: types.BoolValue(true),
+ },
+ &sqlserverflex.GetInstanceResponse{
+ FlavorId: utils.Ptr("flavor_id"),
+ Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(1))),
+ RetentionDays: utils.Ptr(int64(1)),
+ Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("v1")),
+ Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition 1")),
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+ IsDeletable: utils.Ptr(true),
+ },
+ &storageModel{},
+ &encryptionModel{},
+ &networkModel{
+ ACL: types.ListNull(basetypes.StringType{}),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid"),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringNull(),
+ BackupSchedule: types.StringNull(),
+ Replicas: types.Int64Value(1),
+ Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+ "class": types.StringNull(),
+ "size": types.Int64Null(),
+ }),
+ Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{
+ "keyring_id": types.StringNull(),
+ "key_id": types.StringNull(),
+ "key_version": types.StringNull(),
+ "service_account": types.StringNull(),
+ }),
+ Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{
+ "acl": types.ListNull(types.StringType),
+ "access_scope": types.StringNull(),
+ "instance_address": types.StringNull(),
+ "router_address": types.StringNull(),
+ }),
+ IsDeletable: types.BoolValue(true),
+ Edition: types.StringValue("edition 1"),
+ Status: types.StringValue("status"),
+ RetentionDays: types.Int64Value(1),
+ Version: types.StringValue("v1"),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ Model{
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ },
+ &sqlserverflex.GetInstanceResponse{
+ BackupSchedule: utils.Ptr("schedule"),
+ FlavorId: utils.Ptr("flavor_id"),
+ Id: utils.Ptr("iid"),
+ Name: utils.Ptr("name"),
+ Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+ Storage: &sqlserverflex.Storage{
+ Class: utils.Ptr("class"),
+ Size: utils.Ptr(int64(78)),
+ },
+ Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")),
+ RetentionDays: utils.Ptr(int64(1)),
+ Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
+ IsDeletable: utils.Ptr(true),
+ Encryption: nil,
+ Network: &sqlserverflex.InstanceNetwork{
+ AccessScope: nil,
+ Acl: &[]string{
+ "ip1",
+ "ip2",
+ "",
+ },
+ InstanceAddress: nil,
+ RouterAddress: nil,
+ },
+ },
+ &storageModel{},
+ &encryptionModel{},
+ &networkModel{
+ ACL: types.ListValueMust(basetypes.StringType{}, []attr.Value{
+ types.StringValue("ip1"),
+ types.StringValue("ip2"),
+ types.StringValue(""),
+ }),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid"),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Name: types.StringValue("name"),
+ BackupSchedule: types.StringValue("schedule"),
+ Replicas: types.Int64Value(56),
+ Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+ "class": types.StringValue("class"),
+ "size": types.Int64Value(78),
+ }),
+ Network: types.ObjectValueMust(networkTypes, map[string]attr.Value{
+ "acl": types.ListValueMust(types.StringType, []attr.Value{
+ types.StringValue("ip1"),
+ types.StringValue("ip2"),
+ types.StringValue(""),
+ }),
+ "access_scope": types.StringNull(),
+ "instance_address": types.StringNull(),
+ "router_address": types.StringNull(),
+ }),
+ Edition: types.StringValue("edition"),
+ RetentionDays: types.Int64Value(1),
+ Version: types.StringValue("version"),
+ Region: types.StringValue(testRegion),
+ IsDeletable: types.BoolValue(true),
+ Encryption: types.ObjectValueMust(encryptionTypes, map[string]attr.Value{
+ "keyring_id": types.StringNull(),
+ "key_id": types.StringNull(),
+ "key_version": types.StringNull(),
+ "service_account": types.StringNull(),
+ }),
+ Status: types.StringValue("status"),
+ },
+ true,
+ },
+ // {
+ // "simple_values_no_flavor_and_storage",
+ // Model{
+ // InstanceId: types.StringValue("iid"),
+ // ProjectId: types.StringValue("pid"),
+ // },
+ // &sqlserverflex.GetInstanceResponse{
+ // Acl: &[]string{
+ // "ip1",
+ // "ip2",
+ // "",
+ // },
+ // BackupSchedule: utils.Ptr("schedule"),
+ // FlavorId: nil,
+ // Id: utils.Ptr("iid"),
+ // Name: utils.Ptr("name"),
+ // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
+ // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+ // Storage: nil,
+ // Edition: sqlserverflex.GetInstanceResponseGetEditionAttributeType(utils.Ptr("edition")),
+ // RetentionDays: utils.Ptr(int64(1)),
+ // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
+ // },
+ // &flavorModel{
+ // CPU: types.Int64Value(12),
+ // RAM: types.Int64Value(34),
+ // },
+ // &storageModel{
+ // Class: types.StringValue("class"),
+ // Size: types.Int64Value(78),
+ // },
+ // &optionsModel{
+ // Edition: types.StringValue("edition"),
+ // RetentionDays: types.Int64Value(1),
+ // },
+ // testRegion,
+ // Model{
+ // Id: types.StringValue("pid,region,iid"),
+ // InstanceId: types.StringValue("iid"),
+ // ProjectId: types.StringValue("pid"),
+ // Name: types.StringValue("name"),
+ // ACL: types.ListValueMust(types.StringType, []attr.Value{
+ // types.StringValue("ip1"),
+ // types.StringValue("ip2"),
+ // types.StringValue(""),
+ // }),
+ // BackupSchedule: types.StringValue("schedule"),
+ // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{
+ // "id": types.StringNull(),
+ // "description": types.StringNull(),
+ // "cpu": types.Int64Value(12),
+ // "ram": types.Int64Value(34),
+ // }),
+ // Replicas: types.Int64Value(56),
+ // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+ // "class": types.StringValue("class"),
+ // "size": types.Int64Value(78),
+ // }),
+ // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{
+ // "edition": types.StringValue("edition"),
+ // "retention_days": types.Int64Value(1),
+ // }),
+ // Version: types.StringValue("version"),
+ // Region: types.StringValue(testRegion),
+ // },
+ // true,
+ // },
+ // {
+ // "acls_unordered",
+ // Model{
+ // InstanceId: types.StringValue("iid"),
+ // ProjectId: types.StringValue("pid"),
+ // ACL: types.ListValueMust(types.StringType, []attr.Value{
+ // types.StringValue("ip2"),
+ // types.StringValue(""),
+ // types.StringValue("ip1"),
+ // }),
+ // },
+ // &sqlserverflex.GetInstanceResponse{
+ // Acl: &[]string{
+ // "",
+ // "ip1",
+ // "ip2",
+ // },
+ // BackupSchedule: utils.Ptr("schedule"),
+ // FlavorId: nil,
+ // Id: utils.Ptr("iid"),
+ // Name: utils.Ptr("name"),
+ // Replicas: sqlserverflex.GetInstanceResponseGetReplicasAttributeType(utils.Ptr(int32(56))),
+ // Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr("status")),
+ // Storage: nil,
+ // //Options: &map[string]string{
+ // // "edition": "edition",
+ // // "retentionDays": "1",
+ // //},
+ // Version: sqlserverflex.GetInstanceResponseGetVersionAttributeType(utils.Ptr("version")),
+ // },
+ // &flavorModel{
+ // CPU: types.Int64Value(12),
+ // RAM: types.Int64Value(34),
+ // },
+ // &storageModel{
+ // Class: types.StringValue("class"),
+ // Size: types.Int64Value(78),
+ // },
+ // &optionsModel{},
+ // testRegion,
+ // Model{
+ // Id: types.StringValue("pid,region,iid"),
+ // InstanceId: types.StringValue("iid"),
+ // ProjectId: types.StringValue("pid"),
+ // Name: types.StringValue("name"),
+ // ACL: types.ListValueMust(types.StringType, []attr.Value{
+ // types.StringValue("ip2"),
+ // types.StringValue(""),
+ // types.StringValue("ip1"),
+ // }),
+ // BackupSchedule: types.StringValue("schedule"),
+ // Flavor: types.ObjectValueMust(flavorTypes, map[string]attr.Value{
+ // "id": types.StringNull(),
+ // "description": types.StringNull(),
+ // "cpu": types.Int64Value(12),
+ // "ram": types.Int64Value(34),
+ // }),
+ // Replicas: types.Int64Value(56),
+ // Storage: types.ObjectValueMust(storageTypes, map[string]attr.Value{
+ // "class": types.StringValue("class"),
+ // "size": types.Int64Value(78),
+ // }),
+ // Options: types.ObjectValueMust(optionsTypes, map[string]attr.Value{
+ // "edition": types.StringValue("edition"),
+ // "retention_days": types.Int64Value(1),
+ // }),
+ // Version: types.StringValue("version"),
+ // Region: types.StringValue(testRegion),
+ // },
+ // true,
+ // },
+ // {
+ // "nil_response",
+ // Model{
+ // InstanceId: types.StringValue("iid"),
+ // ProjectId: types.StringValue("pid"),
+ // },
+ // nil,
+ // &flavorModel{},
+ // &storageModel{},
+ // &optionsModel{},
+ // testRegion,
+ // Model{},
+ // false,
+ // },
+ // {
+ // "no_resource_id",
+ // Model{
+ // InstanceId: types.StringValue("iid"),
+ // ProjectId: types.StringValue("pid"),
+ // },
+ // &sqlserverflex.GetInstanceResponse{},
+ // &flavorModel{},
+ // &storageModel{},
+ // &optionsModel{},
+ // testRegion,
+ // Model{},
+ // false,
+ // },
+ }
+ for _, tt := range tests {
+ t.Run(tt.description, func(t *testing.T) {
+ err := mapFields(context.Background(), tt.input, &tt.state, tt.storage, tt.encryption, tt.network, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(tt.state, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ })
+ }
+}
+
+// func TestToCreatePayload(t *testing.T) {
+// tests := []struct {
+// description string
+// input *Model
+// inputAcl []string
+// inputFlavor *flavorModel
+// inputStorage *storageModel
+// inputOptions *optionsModel
+// expected *sqlserverflex.CreateInstanceRequestPayload
+// isValid bool
+// }{
+// {
+// "default_values",
+// &Model{},
+// []string{},
+// &flavorModel{},
+// &storageModel{},
+// &optionsModel{},
+// &sqlserverflex.CreateInstanceRequestPayload{
+// Acl: &sqlserverflex.CreateInstanceRequestPayloadGetAclArgType{},
+// Storage: &sqlserverflex.CreateInstanceRequestPayloadGetStorageArgType{},
+// },
+// true,
+// },
+// {
+// "simple_values",
+// &Model{
+// BackupSchedule: types.StringValue("schedule"),
+// Name: types.StringValue("name"),
+// Replicas: types.Int64Value(12),
+// Version: types.StringValue("version"),
+// },
+// []string{
+// "ip_1",
+// "ip_2",
+// },
+// &flavorModel{
+// Id: types.StringValue("flavor_id"),
+// },
+// &storageModel{
+// Class: types.StringValue("class"),
+// Size: types.Int64Value(34),
+// },
+// &optionsModel{
+// Edition: types.StringValue("edition"),
+// RetentionDays: types.Int64Value(1),
+// },
+// &sqlserverflex.CreateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{
+// "ip_1",
+// "ip_2",
+// },
+// },
+// BackupSchedule: utils.Ptr("schedule"),
+// FlavorId: utils.Ptr("flavor_id"),
+// Name: utils.Ptr("name"),
+// Storage: &sqlserverflex.CreateInstancePayloadStorage{
+// Class: utils.Ptr("class"),
+// Size: utils.Ptr(int64(34)),
+// },
+// Options: &sqlserverflex.CreateInstancePayloadOptions{
+// Edition: utils.Ptr("edition"),
+// RetentionDays: utils.Ptr("1"),
+// },
+// Version: utils.Ptr("version"),
+// },
+// true,
+// },
+// {
+// "null_fields_and_int_conversions",
+// &Model{
+// BackupSchedule: types.StringNull(),
+// Name: types.StringNull(),
+// Replicas: types.Int64Value(2123456789),
+// Version: types.StringNull(),
+// },
+// []string{
+// "",
+// },
+// &flavorModel{
+// Id: types.StringNull(),
+// },
+// &storageModel{
+// Class: types.StringNull(),
+// Size: types.Int64Null(),
+// },
+// &optionsModel{
+// Edition: types.StringNull(),
+// RetentionDays: types.Int64Null(),
+// },
+// &sqlserverflex.CreateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{
+// "",
+// },
+// },
+// BackupSchedule: nil,
+// FlavorId: nil,
+// Name: nil,
+// Storage: &sqlserverflex.CreateInstancePayloadStorage{
+// Class: nil,
+// Size: nil,
+// },
+// Options: &sqlserverflex.CreateInstancePayloadOptions{},
+// Version: nil,
+// },
+// true,
+// },
+// {
+// "nil_model",
+// nil,
+// []string{},
+// &flavorModel{},
+// &storageModel{},
+// &optionsModel{},
+// nil,
+// false,
+// },
+// {
+// "nil_acl",
+// &Model{},
+// nil,
+// &flavorModel{},
+// &storageModel{},
+// &optionsModel{},
+// &sqlserverflex.CreateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{},
+// Storage: &sqlserverflex.CreateInstancePayloadStorage{},
+// Options: &sqlserverflex.CreateInstancePayloadOptions{},
+// },
+// true,
+// },
+// {
+// "nil_flavor",
+// &Model{},
+// []string{},
+// nil,
+// &storageModel{},
+// &optionsModel{},
+// nil,
+// false,
+// },
+// {
+// "nil_storage",
+// &Model{},
+// []string{},
+// &flavorModel{},
+// nil,
+// &optionsModel{},
+// &sqlserverflex.CreateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{},
+// },
+// Storage: &sqlserverflex.CreateInstancePayloadStorage{},
+// Options: &sqlserverflex.CreateInstancePayloadOptions{},
+// },
+// true,
+// },
+// {
+// "nil_options",
+// &Model{},
+// []string{},
+// &flavorModel{},
+// &storageModel{},
+// nil,
+// &sqlserverflex.CreateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{},
+// },
+// Storage: &sqlserverflex.CreateInstancePayloadStorage{},
+// Options: &sqlserverflex.CreateInstancePayloadOptions{},
+// },
+// true,
+// },
+// }
+// for _, tt := range tests {
+// t.Run(tt.description, func(t *testing.T) {
+// output, err := toCreatePayload(tt.input, tt.inputAcl, tt.inputFlavor, tt.inputStorage, tt.inputOptions)
+// if !tt.isValid && err == nil {
+// t.Fatalf("Should have failed")
+// }
+// if tt.isValid && err != nil {
+// t.Fatalf("Should not have failed: %v", err)
+// }
+// if tt.isValid {
+// diff := cmp.Diff(output, tt.expected)
+// if diff != "" {
+// t.Fatalf("Data does not match: %s", diff)
+// }
+// }
+// })
+// }
+// }
+//
+// func TestToUpdatePayload(t *testing.T) {
+// tests := []struct {
+// description string
+// input *Model
+// inputAcl []string
+// inputFlavor *flavorModel
+// expected *sqlserverflex.PartialUpdateInstancePayload
+// isValid bool
+// }{
+// {
+// "default_values",
+// &Model{},
+// []string{},
+// &flavorModel{},
+// &sqlserverflex.PartialUpdateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{},
+// },
+// },
+// true,
+// },
+// {
+// "simple_values",
+// &Model{
+// BackupSchedule: types.StringValue("schedule"),
+// Name: types.StringValue("name"),
+// Replicas: types.Int64Value(12),
+// Version: types.StringValue("version"),
+// },
+// []string{
+// "ip_1",
+// "ip_2",
+// },
+// &flavorModel{
+// Id: types.StringValue("flavor_id"),
+// },
+// &sqlserverflex.PartialUpdateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{
+// "ip_1",
+// "ip_2",
+// },
+// },
+// BackupSchedule: utils.Ptr("schedule"),
+// FlavorId: utils.Ptr("flavor_id"),
+// Name: utils.Ptr("name"),
+// Version: utils.Ptr("version"),
+// },
+// true,
+// },
+// {
+// "null_fields_and_int_conversions",
+// &Model{
+// BackupSchedule: types.StringNull(),
+// Name: types.StringNull(),
+// Replicas: types.Int64Value(2123456789),
+// Version: types.StringNull(),
+// },
+// []string{
+// "",
+// },
+// &flavorModel{
+// Id: types.StringNull(),
+// },
+// &sqlserverflex.PartialUpdateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{
+// Items: &[]string{
+// "",
+// },
+// },
+// BackupSchedule: nil,
+// FlavorId: nil,
+// Name: nil,
+// Version: nil,
+// },
+// true,
+// },
+// {
+// "nil_model",
+// nil,
+// []string{},
+// &flavorModel{},
+// nil,
+// false,
+// },
+// {
+// "nil_acl",
+// &Model{},
+// nil,
+// &flavorModel{},
+// &sqlserverflex.PartialUpdateInstancePayload{
+// Acl: &sqlserverflex.CreateInstancePayloadAcl{},
+// },
+// true,
+// },
+// {
+// "nil_flavor",
+// &Model{},
+// []string{},
+// nil,
+// nil,
+// false,
+// },
+// }
+// for _, tt := range tests {
+// t.Run(tt.description, func(t *testing.T) {
+// output, err := toUpdatePayload(tt.input, tt.inputAcl, tt.inputFlavor)
+// if !tt.isValid && err == nil {
+// t.Fatalf("Should have failed")
+// }
+// if tt.isValid && err != nil {
+// t.Fatalf("Should not have failed: %v", err)
+// }
+// if tt.isValid {
+// diff := cmp.Diff(output, tt.expected)
+// if diff != "" {
+// t.Fatalf("Data does not match: %s", diff)
+// }
+// }
+// })
+// }
+// }
+//
+// func TestLoadFlavorId(t *testing.T) {
+// tests := []struct {
+// description string
+// inputFlavor *flavorModel
+// mockedResp *sqlserverflex.ListFlavorsResponse
+// expected *flavorModel
+// getFlavorsFails bool
+// isValid bool
+// }{
+// {
+// "ok_flavor",
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// &sqlserverflex.ListFlavorsResponse{
+// Flavors: &[]sqlserverflex.InstanceFlavorEntry{
+// {
+// Id: utils.Ptr("fid-1"),
+// Cpu: utils.Ptr(int64(2)),
+// Description: utils.Ptr("description"),
+// Ram: utils.Ptr(int64(8)),
+// },
+// },
+// },
+// &flavorModel{
+// Id: types.StringValue("fid-1"),
+// Description: types.StringValue("description"),
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// false,
+// true,
+// },
+// {
+// "ok_flavor_2",
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// &sqlserverflex.ListFlavorsResponse{
+// Flavors: &[]sqlserverflex.InstanceFlavorEntry{
+// {
+// Id: utils.Ptr("fid-1"),
+// Cpu: utils.Ptr(int64(2)),
+// Description: utils.Ptr("description"),
+// Ram: utils.Ptr(int64(8)),
+// },
+// {
+// Id: utils.Ptr("fid-2"),
+// Cpu: utils.Ptr(int64(1)),
+// Description: utils.Ptr("description"),
+// Ram: utils.Ptr(int64(4)),
+// },
+// },
+// },
+// &flavorModel{
+// Id: types.StringValue("fid-1"),
+// Description: types.StringValue("description"),
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// false,
+// true,
+// },
+// {
+// "no_matching_flavor",
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// &sqlserverflex.ListFlavorsResponse{
+// Flavors: &[]sqlserverflex.InstanceFlavorEntry{
+// {
+// Id: utils.Ptr("fid-1"),
+// Cpu: utils.Ptr(int64(1)),
+// Description: utils.Ptr("description"),
+// Ram: utils.Ptr(int64(8)),
+// },
+// {
+// Id: utils.Ptr("fid-2"),
+// Cpu: utils.Ptr(int64(1)),
+// Description: utils.Ptr("description"),
+// Ram: utils.Ptr(int64(4)),
+// },
+// },
+// },
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// false,
+// false,
+// },
+// {
+// "nil_response",
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// &sqlserverflex.ListFlavorsResponse{},
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// false,
+// false,
+// },
+// {
+// "error_response",
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// &sqlserverflex.ListFlavorsResponse{},
+// &flavorModel{
+// CPU: types.Int64Value(2),
+// RAM: types.Int64Value(8),
+// },
+// true,
+// false,
+// },
+// }
+// for _, tt := range tests {
+// t.Run(tt.description, func(t *testing.T) {
+// client := &sqlserverflexClientMocked{
+// returnError: tt.getFlavorsFails,
+// listFlavorsResp: tt.mockedResp,
+// }
+// model := &Model{
+// ProjectId: types.StringValue("pid"),
+// }
+// flavorModel := &flavorModel{
+// CPU: tt.inputFlavor.CPU,
+// RAM: tt.inputFlavor.RAM,
+// }
+// err := loadFlavorId(context.Background(), client, model, flavorModel)
+// if !tt.isValid && err == nil {
+// t.Fatalf("Should have failed")
+// }
+// if tt.isValid && err != nil {
+// t.Fatalf("Should not have failed: %v", err)
+// }
+// if tt.isValid {
+// diff := cmp.Diff(flavorModel, tt.expected)
+// if diff != "" {
+// t.Fatalf("Data does not match: %s", diff)
+// }
+// }
+// })
+// }
+// }
diff --git a/stackit/internal/services/sqlserverflexalpha/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/sqlserverflexalpha/instance/resources_gen/instance_resource_gen.go
index 671c7fd3..58cbf8d1 100644
--- a/stackit/internal/services/sqlserverflexalpha/instance/resources_gen/instance_resource_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/instance/resources_gen/instance_resource_gen.go
@@ -26,11 +26,6 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
},
- "edition": schema.StringAttribute{
- Computed: true,
- Description: "Edition of the MSSQL server instance",
- MarkdownDescription: "Edition of the MSSQL server instance",
- },
"encryption": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"kek_key_id": schema.StringAttribute{
@@ -78,11 +73,6 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "The ID of the instance.",
MarkdownDescription: "The ID of the instance.",
},
- "is_deletable": schema.BoolAttribute{
- Computed: true,
- Description: "Whether the instance can be deleted or not.",
- MarkdownDescription: "Whether the instance can be deleted or not.",
- },
"name": schema.StringAttribute{
Required: true,
Description: "The name of the instance.",
@@ -109,12 +99,6 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
Description: "List of IPV4 cidr.",
MarkdownDescription: "List of IPV4 cidr.",
},
- "instance_address": schema.StringAttribute{
- Computed: true,
- },
- "router_address": schema.StringAttribute{
- Computed: true,
- },
},
CustomType: NetworkType{
ObjectType: types.ObjectType{
@@ -142,19 +126,11 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
),
},
},
- "replicas": schema.Int64Attribute{
- Computed: true,
- Description: "How many replicas the instance should have.",
- MarkdownDescription: "How many replicas the instance should have.",
- },
"retention_days": schema.Int64Attribute{
Required: true,
Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
},
- "status": schema.StringAttribute{
- Computed: true,
- },
"storage": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"class": schema.StringAttribute{
@@ -193,19 +169,15 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
type InstanceModel struct {
BackupSchedule types.String `tfsdk:"backup_schedule"`
- Edition types.String `tfsdk:"edition"`
Encryption EncryptionValue `tfsdk:"encryption"`
FlavorId types.String `tfsdk:"flavor_id"`
Id types.String `tfsdk:"id"`
InstanceId types.String `tfsdk:"instance_id"`
- IsDeletable types.Bool `tfsdk:"is_deletable"`
Name types.String `tfsdk:"name"`
Network NetworkValue `tfsdk:"network"`
ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"`
- Replicas types.Int64 `tfsdk:"replicas"`
RetentionDays types.Int64 `tfsdk:"retention_days"`
- Status types.String `tfsdk:"status"`
Storage StorageValue `tfsdk:"storage"`
Version types.String `tfsdk:"version"`
}
@@ -760,52 +732,14 @@ func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
}
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return nil, diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return nil, diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
if diags.HasError() {
return nil, diags
}
return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
+ AccessScope: accessScopeVal,
+ Acl: aclVal,
+ state: attr.ValueStateKnown,
}, diags
}
@@ -908,52 +842,14 @@ func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]
fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
}
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
if diags.HasError() {
return NewNetworkValueUnknown(), diags
}
return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
+ AccessScope: accessScopeVal,
+ Acl: aclVal,
+ state: attr.ValueStateKnown,
}, diags
}
@@ -1025,15 +921,13 @@ func (t NetworkType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = NetworkValue{}
type NetworkValue struct {
- AccessScope basetypes.StringValue `tfsdk:"access_scope"`
- Acl basetypes.ListValue `tfsdk:"acl"`
- InstanceAddress basetypes.StringValue `tfsdk:"instance_address"`
- RouterAddress basetypes.StringValue `tfsdk:"router_address"`
- state attr.ValueState
+ AccessScope basetypes.StringValue `tfsdk:"access_scope"`
+ Acl basetypes.ListValue `tfsdk:"acl"`
+ state attr.ValueState
}
func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
+ attrTypes := make(map[string]tftypes.Type, 2)
var val tftypes.Value
var err error
@@ -1042,14 +936,12 @@ func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
attrTypes["acl"] = basetypes.ListType{
ElemType: types.StringType,
}.TerraformType(ctx)
- attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
switch v.state {
case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
+ vals := make(map[string]tftypes.Value, 2)
val, err = v.AccessScope.ToTerraformValue(ctx)
@@ -1067,22 +959,6 @@ func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
vals["acl"] = val
- val, err = v.InstanceAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["instance_address"] = val
-
- val, err = v.RouterAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["router_address"] = val
-
if err := tftypes.ValidateValue(objectType, vals); err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
@@ -1130,8 +1006,6 @@ func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
"acl": basetypes.ListType{
ElemType: types.StringType,
},
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
}), diags
}
@@ -1140,8 +1014,6 @@ func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
"acl": basetypes.ListType{
ElemType: types.StringType,
},
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
}
if v.IsNull() {
@@ -1155,10 +1027,8 @@ func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
objVal, diags := types.ObjectValue(
attributeTypes,
map[string]attr.Value{
- "access_scope": v.AccessScope,
- "acl": aclVal,
- "instance_address": v.InstanceAddress,
- "router_address": v.RouterAddress,
+ "access_scope": v.AccessScope,
+ "acl": aclVal,
})
return objVal, diags
@@ -1187,14 +1057,6 @@ func (v NetworkValue) Equal(o attr.Value) bool {
return false
}
- if !v.InstanceAddress.Equal(other.InstanceAddress) {
- return false
- }
-
- if !v.RouterAddress.Equal(other.RouterAddress) {
- return false
- }
-
return true
}
@@ -1212,8 +1074,6 @@ func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
"acl": basetypes.ListType{
ElemType: types.StringType,
},
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
}
}
diff --git a/stackit/internal/services/sqlserverflexalpha/main.go b/stackit/internal/services/sqlserverflexalpha/main.go
new file mode 100644
index 00000000..7ec38cdc
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/main.go
@@ -0,0 +1 @@
+package sqlserverflexalpha
diff --git a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go
index 6d6354ea..cd841d28 100644
--- a/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/sqlserverflex_acc_test.go
@@ -1,396 +1,483 @@
+// Copyright (c) STACKIT
+
package sqlserverflexalpha_test
import (
"context"
_ "embed"
"fmt"
- "os"
- "strconv"
+ "maps"
+ "strings"
"testing"
+ "github.com/hashicorp/terraform-plugin-testing/config"
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
- sqlserverflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
-
- // The fwresource import alias is so there is no collision
- // with the more typical acceptance testing import:
- // "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- fwresource "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-testing/terraform"
+ coreconfig "github.com/stackitcloud/stackit-sdk-go/core/config"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex"
+ "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/wait"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil"
)
-const providerPrefix = "stackitprivatepreview_sqlserverflexalpha"
+var (
+ //go:embed testdata/resource-max.tf
+ resourceMaxConfig string
+ //go:embed testdata/resource-min.tf
+ resourceMinConfig string
+)
-var testInstances []string
-
-func TestInstanceResourceSchema(t *testing.T) {
- t.Parallel()
-
- ctx := context.Background()
- schemaRequest := fwresource.SchemaRequest{}
- schemaResponse := &fwresource.SchemaResponse{}
-
- // Instantiate the resource.Resource and call its Schema method
- sqlserverflexalpha.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
-
- if schemaResponse.Diagnostics.HasError() {
- t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
- }
-
- // Validate the schema
- diagnostics := schemaResponse.Schema.ValidateImplementation(ctx)
-
- if diagnostics.HasError() {
- t.Fatalf("Schema validation diagnostics: %+v", diagnostics)
- }
+var testConfigVarsMin = config.Variables{
+ "project_id": config.StringVariable(testutil.ProjectId),
+ "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))),
+ "flavor_cpu": config.IntegerVariable(4),
+ "flavor_ram": config.IntegerVariable(16),
+ "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"),
+ "replicas": config.IntegerVariable(1),
+ "flavor_id": config.StringVariable("4.16-Single"),
+ "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))),
+ "role": config.StringVariable("##STACKIT_LoginManager##"),
}
-func TestMain(m *testing.M) {
- testutils.Setup()
- code := m.Run()
- // shutdown()
- os.Exit(code)
+var testConfigVarsMax = config.Variables{
+ "project_id": config.StringVariable(testutil.ProjectId),
+ "name": config.StringVariable(fmt.Sprintf("tf-acc-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlphaNum))),
+ "acl1": config.StringVariable("192.168.0.0/16"),
+ "flavor_cpu": config.IntegerVariable(4),
+ "flavor_ram": config.IntegerVariable(16),
+ "flavor_description": config.StringVariable("SQLServer-Flex-4.16-Standard-EU01"),
+ "storage_class": config.StringVariable("premium-perf2-stackit"),
+ "storage_size": config.IntegerVariable(40),
+ "server_version": config.StringVariable("2022"),
+ "replicas": config.IntegerVariable(1),
+ "options_retention_days": config.IntegerVariable(64),
+ "flavor_id": config.StringVariable("4.16-Single"),
+ "backup_schedule": config.StringVariable("00 6 * * *"),
+ "username": config.StringVariable(fmt.Sprintf("tf-acc-user-%s", acctest.RandStringFromCharSet(7, acctest.CharSetAlpha))),
+ "role": config.StringVariable("##STACKIT_LoginManager##"),
+ "region": config.StringVariable(testutil.Region),
}
-func testAccPreCheck(t *testing.T) {
- if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok {
- t.Fatalf("could not find env var TF_ACC_PROJECT_ID")
- }
+func configVarsMinUpdated() config.Variables {
+ temp := maps.Clone(testConfigVarsMax)
+ temp["name"] = config.StringVariable(testutil.ConvertConfigVariable(temp["name"]) + "changed")
+ return temp
}
-type resData struct {
- ServiceAccountFilePath string
- ProjectID string
- Region string
- Name string
- TfName string
- FlavorID string
- BackupSchedule string
- UseEncryption bool
- KekKeyID string
- KekKeyRingID string
- KekKeyVersion uint8
- KekServiceAccount string
- PerformanceClass string
- Size uint32
- ACLString string
- AccessScope string
- RetentionDays uint32
- Version string
- Users []User
- Databases []Database
+func configVarsMaxUpdated() config.Variables {
+ temp := maps.Clone(testConfigVarsMax)
+ temp["backup_schedule"] = config.StringVariable("00 12 * * *")
+ return temp
}
-type User struct {
- Name string
- ProjectID string
- Roles []string
-}
-
-type Database struct {
- Name string
- ProjectID string
- Owner string
- Collation string
- Compatibility string
-}
-
-func resName(res, name string) string {
- return fmt.Sprintf("%s_%s.%s", providerPrefix, res, name)
-}
-
-func getExample() resData {
- name := acctest.RandomWithPrefix("tf-acc")
- return resData{
- Region: os.Getenv("TF_ACC_REGION"),
- ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Name: name,
- TfName: name,
- FlavorID: "4.16-Single",
- BackupSchedule: "0 0 * * *",
- UseEncryption: false,
- RetentionDays: 33,
- PerformanceClass: "premium-perf2-stackit",
- Size: 10,
- ACLString: "0.0.0.0/0",
- AccessScope: "PUBLIC",
- Version: "2022",
- }
-}
-
-func TestAccInstance(t *testing.T) {
- exData := getExample()
-
- updNameData := exData
- updNameData.Name = "name-updated"
-
- updSizeData := exData
- updSizeData.Size = 25
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- testInstances = append(testInstances, exData.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
+func TestAccSQLServerFlexMinResource(t *testing.T) {
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ CheckDestroy: testAccChecksqlserverflexDestroy,
Steps: []resource.TestStep{
- // Create and verify
+ // Creation
{
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig,
+ ConfigVariables: testConfigVarsMin,
Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Update name and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updNameData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", updNameData.Name),
- ),
- },
- // Update size and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updSizeData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- testutils.ResStr(providerPrefix, "instance", exData.TfName),
- "storage.size",
- strconv.Itoa(int(updSizeData.Size)),
+ // Instance
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMin["name"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMin["replicas"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])),
+ // User
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "project_id",
+ "stackit_sqlserverflex_instance.instance", "project_id",
),
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "instance_id",
+ "stackit_sqlserverflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"),
),
},
+ // Update
{
- RefreshState: true,
- },
- //// Import test
- //{
- // ResourceName: resName("instance", exData.TfName),
- // ImportState: true,
- // ImportStateVerify: true,
- // },
- },
- })
-}
-
-func TestAccInstanceNoEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{
- "##STACKIT_DatabaseManager##",
- "##STACKIT_LoginManager##",
- //"##STACKIT_ProcessManager##",
- //"##STACKIT_SQLAgentManager##",
- //"##STACKIT_SQLAgentUser##",
- //"##STACKIT_ServerManager##",
- },
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- testInstances = append(testInstances, data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig,
+ ConfigVariables: testConfigVarsMin,
Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
- // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", strconv.Itoa(len(data.Users[0].Roles))),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
+ // Instance
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMin["name"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])),
+ // User
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "project_id",
+ "stackit_sqlserverflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "instance_id",
+ "stackit_sqlserverflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"),
),
},
+ // data source
+ {
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig,
+ ConfigVariables: testConfigVarsMin,
+ Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance data
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMin["name"])),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_sqlserverflex_instance.instance", "project_id",
+ "stackit_sqlserverflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_sqlserverflex_instance.instance", "instance_id",
+ "stackit_sqlserverflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_sqlserverflex_user.user", "instance_id",
+ "stackit_sqlserverflex_user.user", "instance_id",
+ ),
+
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_id"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_description"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_cpu"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMin["flavor_ram"])),
+
+ // User data
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutil.ConvertConfigVariable(testConfigVarsMin["project_id"])),
+ resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutil.ConvertConfigVariable(testConfigVarsMin["username"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutil.ConvertConfigVariable(testConfigVarsMax["role"])),
+ ),
+ },
+ // Import
+ {
+ ConfigVariables: testConfigVarsMin,
+ ResourceName: "stackit_sqlserverflex_instance.instance",
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+
+ return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"backup_schedule"},
+ ImportStateCheck: func(s []*terraform.InstanceState) error {
+ if len(s) != 1 {
+ return fmt.Errorf("expected 1 state, got %d", len(s))
+ }
+ return nil
+ },
+ },
+ {
+ ResourceName: "stackit_sqlserverflex_user.user",
+ ConfigVariables: testConfigVarsMin,
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+ userId, ok := r.Primary.Attributes["user_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute user_id")
+ }
+
+ return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"password"},
+ },
+ // Update
+ {
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMinConfig,
+ ConfigVariables: configVarsMinUpdated(),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance data
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(configVarsMinUpdated()["project_id"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(configVarsMinUpdated()["name"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_cpu"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(configVarsMinUpdated()["flavor_ram"])),
+ ),
+ },
+ // Deletion is done by the framework implicitly
},
})
}
-func TestAccInstanceEncryption(t *testing.T) {
- data := getExample()
+func TestAccSQLServerFlexMaxResource(t *testing.T) {
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ CheckDestroy: testAccChecksqlserverflexDestroy,
+ Steps: []resource.TestStep{
+ // Creation
+ {
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig,
+ ConfigVariables: testConfigVarsMax,
+ Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMax["name"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(testConfigVarsMax["acl1"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMax["replicas"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutil.ConvertConfigVariable(testConfigVarsMax["server_version"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutil.Region),
+ // User
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "project_id",
+ "stackit_sqlserverflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "instance_id",
+ "stackit_sqlserverflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"),
+ ),
+ },
+ // Update
+ {
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig,
+ ConfigVariables: testConfigVarsMax,
+ Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMax["name"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(testConfigVarsMax["acl1"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMax["replicas"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutil.ConvertConfigVariable(testConfigVarsMax["storage_class"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutil.ConvertConfigVariable(testConfigVarsMax["storage_size"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutil.ConvertConfigVariable(testConfigVarsMax["server_version"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "region", testutil.Region),
+ // User
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "project_id",
+ "stackit_sqlserverflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "stackit_sqlserverflex_user.user", "instance_id",
+ "stackit_sqlserverflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "user_id"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_user.user", "password"),
+ ),
+ },
+ // data source
+ {
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig,
+ ConfigVariables: testConfigVarsMax,
+ Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance data
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(testConfigVarsMax["name"])),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_sqlserverflex_instance.instance", "project_id",
+ "stackit_sqlserverflex_instance.instance", "project_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_sqlserverflex_instance.instance", "instance_id",
+ "stackit_sqlserverflex_instance.instance", "instance_id",
+ ),
+ resource.TestCheckResourceAttrPair(
+ "data.stackit_sqlserverflex_user.user", "instance_id",
+ "stackit_sqlserverflex_user.user", "instance_id",
+ ),
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"},
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(testConfigVarsMax["acl1"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.id", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_id"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.description", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_description"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_cpu"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(testConfigVarsMax["flavor_ram"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(testConfigVarsMax["replicas"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(testConfigVarsMax["options_retention_days"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"])),
- data.UseEncryption = true
- data.KekKeyID = os.Getenv("TF_ACC_KEK_KEY_ID")
- data.KekKeyRingID = os.Getenv("TF_ACC_KEK_KEY_RING_ID")
- verString := os.Getenv("TF_ACC_KEK_KEY_VERSION")
- version, err := strconv.ParseInt(verString, 0, 32)
+ // User data
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "project_id", testutil.ConvertConfigVariable(testConfigVarsMax["project_id"])),
+ resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "user_id"),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "username", testutil.ConvertConfigVariable(testConfigVarsMax["username"])),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.#", "1"),
+ resource.TestCheckResourceAttr("data.stackit_sqlserverflex_user.user", "roles.0", testutil.ConvertConfigVariable(testConfigVarsMax["role"])),
+ resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "host"),
+ resource.TestCheckResourceAttrSet("data.stackit_sqlserverflex_user.user", "port"),
+ ),
+ },
+ // Import
+ {
+ ConfigVariables: testConfigVarsMax,
+ ResourceName: "stackit_sqlserverflex_instance.instance",
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_sqlserverflex_instance.instance"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_instance.instance")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+
+ return fmt.Sprintf("%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"backup_schedule"},
+ ImportStateCheck: func(s []*terraform.InstanceState) error {
+ if len(s) != 1 {
+ return fmt.Errorf("expected 1 state, got %d", len(s))
+ }
+ if s[0].Attributes["backup_schedule"] != testutil.ConvertConfigVariable(testConfigVarsMax["backup_schedule"]) {
+ return fmt.Errorf("expected backup_schedule %s, got %s", testConfigVarsMax["backup_schedule"], s[0].Attributes["backup_schedule"])
+ }
+ return nil
+ },
+ },
+ {
+ ResourceName: "stackit_sqlserverflex_user.user",
+ ConfigVariables: testConfigVarsMax,
+ ImportStateIdFunc: func(s *terraform.State) (string, error) {
+ r, ok := s.RootModule().Resources["stackit_sqlserverflex_user.user"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find resource stackit_sqlserverflex_user.user")
+ }
+ instanceId, ok := r.Primary.Attributes["instance_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute instance_id")
+ }
+ userId, ok := r.Primary.Attributes["user_id"]
+ if !ok {
+ return "", fmt.Errorf("couldn't find attribute user_id")
+ }
+
+ return fmt.Sprintf("%s,%s,%s,%s", testutil.ProjectId, testutil.Region, instanceId, userId), nil
+ },
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"password"},
+ },
+ // Update
+ {
+ Config: testutil.SQLServerFlexProviderConfig() + "\n" + resourceMaxConfig,
+ ConfigVariables: configVarsMaxUpdated(),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ // Instance data
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "project_id", testutil.ConvertConfigVariable(configVarsMaxUpdated()["project_id"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "instance_id"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "name", testutil.ConvertConfigVariable(configVarsMaxUpdated()["name"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.#", "1"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "acl.0", testutil.ConvertConfigVariable(configVarsMaxUpdated()["acl1"])),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.id"),
+ resource.TestCheckResourceAttrSet("stackit_sqlserverflex_instance.instance", "flavor.description"),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.cpu", testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_cpu"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "flavor.ram", testutil.ConvertConfigVariable(configVarsMaxUpdated()["flavor_ram"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "replicas", testutil.ConvertConfigVariable(configVarsMaxUpdated()["replicas"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.class", testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_class"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "storage.size", testutil.ConvertConfigVariable(configVarsMaxUpdated()["storage_size"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "version", testutil.ConvertConfigVariable(configVarsMaxUpdated()["server_version"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "options.retention_days", testutil.ConvertConfigVariable(configVarsMaxUpdated()["options_retention_days"])),
+ resource.TestCheckResourceAttr("stackit_sqlserverflex_instance.instance", "backup_schedule", testutil.ConvertConfigVariable(configVarsMaxUpdated()["backup_schedule"])),
+ ),
+ },
+ // Deletion is done by the framework implicitly
+ },
+ })
+}
+
+func testAccChecksqlserverflexDestroy(s *terraform.State) error {
+ ctx := context.Background()
+ var client *sqlserverflex.APIClient
+ var err error
+ if testutil.SQLServerFlexCustomEndpoint == "" {
+ client, err = sqlserverflex.NewAPIClient()
+ } else {
+ client, err = sqlserverflex.NewAPIClient(
+ coreconfig.WithEndpoint(testutil.SQLServerFlexCustomEndpoint),
+ )
+ }
if err != nil {
- t.Errorf("error coverting value to uint8: %+v", verString)
+ return fmt.Errorf("creating client: %w", err)
}
- data.KekKeyVersion = uint8(version) //nolint:gosec // not important its a test
- data.KekServiceAccount = os.Getenv("TF_ACC_KEK_SERVICE_ACCOUNT")
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- testInstances = append(testInstances, data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
+ instancesToDestroy := []string{}
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "stackit_sqlserverflex_instance" {
+ continue
+ }
+ // instance terraform ID: = "[project_id],[region],[instance_id]"
+ instanceId := strings.Split(rs.Primary.ID, core.Separator)[2]
+ instancesToDestroy = append(instancesToDestroy, instanceId)
+ }
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
+ instancesResp, err := client.ListInstances(ctx, testutil.ProjectId, testutil.Region).Execute()
+ if err != nil {
+ return fmt.Errorf("getting instancesResp: %w", err)
+ }
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
+ items := *instancesResp.Items
+ for i := range items {
+ if items[i].Id == nil {
+ continue
+ }
+ if utils.Contains(instancesToDestroy, *items[i].Id) {
+ err := client.DeleteInstanceExecute(ctx, testutil.ProjectId, *items[i].Id, testutil.Region)
+ if err != nil {
+ return fmt.Errorf("destroying instance %s during CheckDestroy: %w", *items[i].Id, err)
+ }
+ _, err = wait.DeleteInstanceWaitHandler(ctx, client, testutil.ProjectId, *items[i].Id, testutil.Region).WaitWithContext(ctx)
+ if err != nil {
+ return fmt.Errorf("destroying instance %s during CheckDestroy: waiting for deletion %w", *items[i].Id, err)
+ }
+ }
+ }
+ return nil
}
diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl
deleted file mode 100644
index cc274fe9..00000000
--- a/stackit/internal/services/sqlserverflexalpha/testdata/instance_template.gompl
+++ /dev/null
@@ -1,60 +0,0 @@
-provider "stackitprivatepreview" {
- default_region = "{{ .Region }}"
- service_account_key_path = "{{ .ServiceAccountFilePath }}"
-}
-
-resource "stackitprivatepreview_sqlserverflexalpha_instance" "{{ .TfName }}" {
- project_id = "{{ .ProjectID }}"
- name = "{{ .Name }}"
- backup_schedule = "{{ .BackupSchedule }}"
- retention_days = {{ .RetentionDays }}
- flavor_id = "{{ .FlavorID }}"
- storage = {
- class = "{{ .PerformanceClass }}"
- size = {{ .Size }}
- }
-{{ if .UseEncryption }}
- encryption = {
- kek_key_id = "{{ .KekKeyID }}"
- kek_key_ring_id = "{{ .KekKeyRingID }}"
- kek_key_version = {{ .KekKeyVersion }}
- service_account = "{{ .KekServiceAccount }}"
- }
-{{ end }}
- network = {
- acl = ["{{ .ACLString }}"]
- access_scope = "{{ .AccessScope }}"
- }
- version = "{{ .Version }}"
-}
-
-{{ if .Users }}
-{{ $tfName := .TfName }}
-{{ range $user := .Users }}
-resource "stackitprivatepreview_sqlserverflexalpha_user" "{{ $user.Name }}" {
- project_id = "{{ $user.ProjectID }}"
- instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id
- username = "{{ $user.Name }}"
- roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
-}
-{{ end }}
-{{ end }}
-
-{{ if .Databases }}
-{{ $tfName := .TfName }}
-{{ range $db := .Databases }}
-resource "stackitprivatepreview_sqlserverflexalpha_database" "{{ $db.Name }}" {
- depends_on = [stackitprivatepreview_sqlserverflexalpha_user.{{ $db.Owner }}]
- project_id = "{{ $db.ProjectID }}"
- instance_id = stackitprivatepreview_sqlserverflexalpha_instance.{{ $tfName }}.instance_id
- name = "{{ $db.Name }}"
- owner = "{{ $db.Owner }}"
-{{ if $db.Collation }}
- collation = "{{ $db.Collation }}"
-{{ end }}
-{{ if $db.Compatibility }}
- compatibility = "{{ $db.Compatibility }}"
-{{ end }}
-}
-{{ end }}
-{{ end }}
diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf b/stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf
new file mode 100644
index 00000000..b365f096
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/testdata/resource-max.tf
@@ -0,0 +1,52 @@
+
+variable "project_id" {}
+variable "name" {}
+variable "acl1" {}
+variable "flavor_cpu" {}
+variable "flavor_ram" {}
+variable "storage_class" {}
+variable "storage_size" {}
+variable "options_retention_days" {}
+variable "backup_schedule" {}
+variable "username" {}
+variable "role" {}
+variable "server_version" {}
+variable "region" {}
+
+resource "stackit_sqlserverflex_instance" "instance" {
+ project_id = var.project_id
+ name = var.name
+ acl = [var.acl1]
+ flavor = {
+ cpu = var.flavor_cpu
+ ram = var.flavor_ram
+ }
+ storage = {
+ class = var.storage_class
+ size = var.storage_size
+ }
+ version = var.server_version
+ options = {
+ retention_days = var.options_retention_days
+ }
+ backup_schedule = var.backup_schedule
+ region = var.region
+}
+
+resource "stackit_sqlserverflex_user" "user" {
+ project_id = stackit_sqlserverflex_instance.instance.project_id
+ instance_id = stackit_sqlserverflex_instance.instance.instance_id
+ username = var.username
+ roles = [var.role]
+}
+
+data "stackit_sqlserverflex_instance" "instance" {
+ project_id = var.project_id
+ instance_id = stackit_sqlserverflex_instance.instance.instance_id
+}
+
+data "stackit_sqlserverflex_user" "user" {
+ project_id = var.project_id
+ instance_id = stackit_sqlserverflex_instance.instance.instance_id
+ user_id = stackit_sqlserverflex_user.user.user_id
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf b/stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf
new file mode 100644
index 00000000..3f17d5cc
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/testdata/resource-min.tf
@@ -0,0 +1,34 @@
+
+variable "project_id" {}
+variable "name" {}
+variable "flavor_cpu" {}
+variable "flavor_ram" {}
+variable "username" {}
+variable "role" {}
+
+resource "stackit_sqlserverflex_instance" "instance" {
+ project_id = var.project_id
+ name = var.name
+ flavor = {
+ cpu = var.flavor_cpu
+ ram = var.flavor_ram
+ }
+}
+
+resource "stackit_sqlserverflex_user" "user" {
+ project_id = stackit_sqlserverflex_instance.instance.project_id
+ instance_id = stackit_sqlserverflex_instance.instance.instance_id
+ username = var.username
+ roles = [var.role]
+}
+
+data "stackit_sqlserverflex_instance" "instance" {
+ project_id = var.project_id
+ instance_id = stackit_sqlserverflex_instance.instance.instance_id
+}
+
+data "stackit_sqlserverflex_user" "user" {
+ project_id = var.project_id
+ instance_id = stackit_sqlserverflex_instance.instance.instance_id
+ user_id = stackit_sqlserverflex_user.user.user_id
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource.go b/stackit/internal/services/sqlserverflexalpha/user/datasource.go
index 82d78697..9b083db0 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/datasource.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/datasource.go
@@ -4,47 +4,57 @@ import (
"context"
"fmt"
"net/http"
+ "strconv"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
"github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-log/tflog"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
- sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/datasources_gen"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
)
-var _ datasource.DataSource = (*userDataSource)(nil)
+// Ensure the implementation satisfies the expected interfaces.
+var (
+ _ datasource.DataSource = &userDataSource{}
+)
+type DataSourceModel struct {
+ Id types.String `tfsdk:"id"` // needed by TF
+ UserId types.Int64 `tfsdk:"user_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Username types.String `tfsdk:"username"`
+ Roles types.Set `tfsdk:"roles"`
+ Host types.String `tfsdk:"host"`
+ Port types.Int64 `tfsdk:"port"`
+ Region types.String `tfsdk:"region"`
+ Status types.String `tfsdk:"status"`
+ DefaultDatabase types.String `tfsdk:"default_database"`
+}
+
+// NewUserDataSource is a helper function to simplify the provider implementation.
func NewUserDataSource() datasource.DataSource {
return &userDataSource{}
}
-type dataSourceModel struct {
- DefaultDatabase types.String `tfsdk:"default_database"`
- Host types.String `tfsdk:"host"`
- Id types.String `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Port types.Int64 `tfsdk:"port"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
- Username types.String `tfsdk:"username"`
-}
-
+// userDataSource is the data source implementation.
type userDataSource struct {
- client *sqlserverflexalphaPkg.APIClient
+ client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
-func (d *userDataSource) Metadata(
+// Metadata returns the data source type name.
+func (r *userDataSource) Metadata(
_ context.Context,
req datasource.MetadataRequest,
resp *datasource.MetadataResponse,
@@ -52,32 +62,109 @@ func (d *userDataSource) Metadata(
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
}
-func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexalphaGen.UserDataSourceSchema(ctx)
-}
-
// Configure adds the provider configured client to the data source.
-func (d *userDataSource) Configure(
+func (r *userDataSource) Configure(
ctx context.Context,
req datasource.ConfigureRequest,
resp *datasource.ConfigureResponse,
) {
var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
if !ok {
return
}
- apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
if resp.Diagnostics.HasError() {
return
}
- d.client = apiClient
- tflog.Info(ctx, "SQL SERVER Flex alpha database client configured")
+ r.client = apiClient
+ tflog.Info(ctx, "SQLServer Flex user client configured")
}
-func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model dataSourceModel
+// Schema defines the schema for the data source.
+func (r *userDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "SQLServer Flex user data source schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal data source. ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
+ "user_id": "User ID.",
+ "instance_id": "ID of the SQLServer Flex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "username": "Username of the SQLServer Flex instance.",
+ "roles": "Database access levels for the user.",
+ "password": "Password of the user account.",
+ "region": "The resource region. If not defined, the provider region is used.",
+ "status": "Status of the user.",
+ "default_database": "Default database of the user.",
+ }
+
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ },
+ "user_id": schema.Int64Attribute{
+ Description: descriptions["user_id"],
+ Required: true,
+ Validators: []validator.Int64{
+ int64validator.AtLeast(1),
+ },
+ },
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "username": schema.StringAttribute{
+ Description: descriptions["username"],
+ Computed: true,
+ },
+ "roles": schema.SetAttribute{
+ Description: descriptions["roles"],
+ ElementType: types.StringType,
+ Computed: true,
+ },
+ "host": schema.StringAttribute{
+ Computed: true,
+ },
+ "port": schema.Int64Attribute{
+ Computed: true,
+ },
+ "region": schema.StringAttribute{
+ // the region cannot be found automatically, so it has to be passed
+ Optional: true,
+ Description: descriptions["region"],
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ "default_database": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+// Read refreshes the Terraform state with the latest data.
+func (r *userDataSource) Read(
+ ctx context.Context,
+ req datasource.ReadRequest,
+ resp *datasource.ReadResponse,
+) { // nolint:gocritic // function signature required by Terraform
+ var model DataSourceModel
diags := req.Config.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -89,13 +176,13 @@ func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, r
projectId := model.ProjectId.ValueString()
instanceId := model.InstanceId.ValueString()
userId := model.UserId.ValueInt64()
- region := d.providerData.GetRegionWithOverride(model.Region)
+ region := r.providerData.GetRegionWithOverride(model.Region)
ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "user_id", userId)
ctx = tflog.SetField(ctx, "region", region)
- recordSetResp, err := d.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
+ recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
utils.LogError(
ctx,
@@ -136,5 +223,50 @@ func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, r
if resp.Diagnostics.HasError() {
return
}
- tflog.Info(ctx, "SQLServer Flex Alpha instance read")
+ tflog.Info(ctx, "SQLServer Flex instance read")
+}
+
+func mapDataSourceFields(userResp *sqlserverflexalpha.GetUserResponse, model *DataSourceModel, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ var userId int64
+ if model.UserId.ValueInt64() != 0 {
+ userId = model.UserId.ValueInt64()
+ } else if user.Id != nil {
+ userId = *user.Id
+ } else {
+ return fmt.Errorf("user id not present")
+ }
+ model.Id = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
+ )
+ model.UserId = types.Int64Value(userId)
+ model.Username = types.StringPointerValue(user.Username)
+
+ if user.Roles == nil {
+ model.Roles = types.SetNull(types.StringType)
+ } else {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
+ }
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = rolesSet
+ }
+ model.Host = types.StringPointerValue(user.Host)
+ model.Port = types.Int64PointerValue(user.Port)
+ model.Region = types.StringValue(region)
+ model.Status = types.StringPointerValue(user.Status)
+ model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase)
+
+ return nil
}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go
new file mode 100644
index 00000000..b98c2e53
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/user/datasource_test.go
@@ -0,0 +1,147 @@
+package sqlserverflexalpha
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+)
+
+func TestMapDataSourceFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *sqlserverflexalpha.GetUserResponse
+ region string
+ expected DataSourceModel
+ isValid bool
+ }{
+ {
+ "default_values",
+ &sqlserverflexalpha.GetUserResponse{},
+ testRegion,
+ DataSourceModel{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ Status: types.StringNull(),
+ DefaultDatabase: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &sqlserverflexalpha.GetUserResponse{
+
+ Roles: &[]sqlserverflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Username: utils.Ptr("username"),
+ Host: utils.Ptr("host"),
+ Port: utils.Ptr(int64(1234)),
+ Status: utils.Ptr("active"),
+ DefaultDatabase: utils.Ptr("default_db"),
+ },
+ testRegion,
+ DataSourceModel{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringValue("username"),
+ Roles: types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ Host: types.StringValue("host"),
+ Port: types.Int64Value(1234),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue("active"),
+ DefaultDatabase: types.StringValue("default_db"),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &sqlserverflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Roles: &[]sqlserverflexalpha.UserRole{},
+ Username: nil,
+ Host: nil,
+ Port: utils.Ptr(int64(2123456789)),
+ },
+ testRegion,
+ DataSourceModel{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetValueMust(types.StringType, []attr.Value{}),
+ Host: types.StringNull(),
+ Port: types.Int64Value(2123456789),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ DataSourceModel{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &sqlserverflexalpha.GetUserResponse{},
+ testRegion,
+ DataSourceModel{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &sqlserverflexalpha.GetUserResponse{},
+ testRegion,
+ DataSourceModel{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &DataSourceModel{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ UserId: tt.expected.UserId,
+ }
+ err := mapDataSourceFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go
index 329469ea..3d252237 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/datasources_gen/user_data_source_gen.go
@@ -98,7 +98,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
"users": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "tf_original_api_id": schema.Int64Attribute{
+ "id": schema.Int64Attribute{
Computed: true,
Description: "The ID of the user.",
MarkdownDescription: "The ID of the user.",
diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper.go b/stackit/internal/services/sqlserverflexalpha/user/mapper.go
deleted file mode 100644
index 9e25be94..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/mapper.go
+++ /dev/null
@@ -1,197 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "fmt"
- "slices"
- "strconv"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapDataSourceFields maps the API response to a dataSourceModel.
-func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != 0 {
- userId = user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
- )
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringValue(user.Username)
-
- // Map roles
- if user.Roles == nil {
- model.Roles = types.List(types.SetNull(types.StringType))
- } else {
- resRoles := user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- // Set remaining attributes
- model.Host = types.StringValue(user.Host)
- model.Port = types.Int64Value(int64(user.Port))
- model.Region = types.StringValue(region)
- model.Status = types.StringValue(user.Status)
- model.DefaultDatabase = types.StringValue(user.DefaultDatabase)
-
- return nil
-}
-
-// mapFields maps the API response to a resourceModel.
-func mapFields(userResp *v3alpha1api.GetUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userID int64
- if model.UserId.ValueInt64() != 0 {
- userID = model.UserId.ValueInt64()
- } else if user.Id != 0 {
- userID = user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = types.Int64Value(userID)
- model.UserId = types.Int64Value(userID)
- model.Username = types.StringValue(user.Username)
-
- // Map roles
- if user.Roles != nil {
- resRoles := user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- // Ensure roles is not null
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- // Set connection details
- model.Host = types.StringValue(user.Host)
- model.Port = types.Int64Value(int64(user.Port))
- model.Region = types.StringValue(region)
- return nil
-}
-
-// mapFieldsCreate maps the API response from creating a user to a resourceModel.
-func mapFieldsCreate(userResp *v3alpha1api.CreateUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- if user.Id == 0 {
- return fmt.Errorf("user id not present")
- }
- userID := user.Id
- model.Id = types.Int64Value(userID)
- model.UserId = types.Int64Value(userID)
- model.Username = types.StringValue(user.Username)
-
- if user.Password == "" {
- return fmt.Errorf("user password not present")
- }
- model.Password = types.StringValue(user.Password)
-
- if len(user.Roles) > 0 {
- resRoles := user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- model.Password = types.StringValue(user.Password)
- model.Uri = types.StringValue(user.Uri)
-
- model.Host = types.StringValue(user.Host)
- model.Port = types.Int64Value(int64(user.Port))
- model.Region = types.StringValue(region)
- model.Status = types.StringValue(user.Status)
- model.DefaultDatabase = types.StringValue(user.DefaultDatabase)
-
- return nil
-}
-
-// toCreatePayload converts a resourceModel to an API CreateUserRequestPayload.
-func toCreatePayload(
- model *resourceModel,
- roles []string,
-) (*v3alpha1api.CreateUserRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- res := v3alpha1api.CreateUserRequestPayload{
- Username: model.Username.ValueString(),
- DefaultDatabase: nil,
- Roles: roles,
- }
- if !model.DefaultDatabase.IsUnknown() && !model.DefaultDatabase.IsNull() {
- res.DefaultDatabase = model.DefaultDatabase.ValueStringPointer()
- }
- return &res, nil
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go b/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go
deleted file mode 100644
index 394d7a00..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/mapper_test.go
+++ /dev/null
@@ -1,540 +0,0 @@
-package sqlserverflexalpha
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-)
-
-func TestMapDataSourceFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *v3alpha1api.GetUserResponse
- region string
- expected dataSourceModel
- isValid bool
- }{
- {
- "default_values",
- &v3alpha1api.GetUserResponse{},
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringValue(""),
- Port: types.Int64Value(0),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- DefaultDatabase: types.StringValue(""),
- },
- true,
- },
- {
- "simple_values",
- &v3alpha1api.GetUserResponse{
- Roles: []string{
- "##STACKIT_SQLAgentUser##",
- "##STACKIT_DatabaseManager##",
- "##STACKIT_LoginManager##",
- "##STACKIT_SQLAgentManager##",
- "##STACKIT_ProcessManager##",
- "##STACKIT_ServerManager##",
- },
- Username: "username",
- Host: "host",
- Port: int32(1234),
- Status: "active",
- DefaultDatabase: "default_db",
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue("##STACKIT_DatabaseManager##"),
- types.StringValue("##STACKIT_LoginManager##"),
- types.StringValue("##STACKIT_ProcessManager##"),
- types.StringValue("##STACKIT_SQLAgentManager##"),
- types.StringValue("##STACKIT_SQLAgentUser##"),
- types.StringValue("##STACKIT_ServerManager##"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("active"),
- DefaultDatabase: types.StringValue("default_db"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &v3alpha1api.GetUserResponse{
- Id: int64(1),
- Roles: []string{},
- Username: "",
- Host: "",
- Port: int32(2123456789),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringValue(""),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringValue(""),
- Status: types.StringValue(""),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "nil_response_2",
- &v3alpha1api.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "no_resource_id",
- &v3alpha1api.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &dataSourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapDataSourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFieldsCreate(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *v3alpha1api.CreateUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &v3alpha1api.CreateUserResponse{
- Id: int64(1),
- Password: "xy",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringValue("xy"),
- Host: types.StringValue(""),
- Port: types.Int64Value(0),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringValue(""),
- Status: types.StringValue(""),
- Uri: types.StringValue(""),
- },
- true,
- },
- {
- "simple_values",
- &v3alpha1api.CreateUserResponse{
- Id: int64(2),
- Roles: []string{
- "role_2",
- "role_1",
- "",
- },
- Username: "username",
- Password: "password",
- Host: "host",
- Port: int32(1234),
- Status: "status",
- DefaultDatabase: "default_db",
- Uri: "myURI",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Password: types.StringValue("password"),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- DefaultDatabase: types.StringValue("default_db"),
- Uri: types.StringValue("myURI"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &v3alpha1api.CreateUserResponse{
- Id: int64(3),
- Roles: []string{},
- Username: "",
- Password: "xy",
- Host: "",
- Port: int32(256789),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(3),
- UserId: types.Int64Value(3),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.ListNull(types.StringType),
- Password: types.StringValue("xy"),
- Host: types.StringValue(""),
- Port: types.Int64Value(256789),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringValue(""),
- Status: types.StringValue(""),
- Uri: types.StringValue(""),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &v3alpha1api.CreateUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &v3alpha1api.CreateUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_password",
- &v3alpha1api.CreateUserResponse{
- Id: int64(1),
- },
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
- err := mapFieldsCreate(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *v3alpha1api.GetUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &v3alpha1api.GetUserResponse{},
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringValue(""),
- Port: types.Int64Value(0),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &v3alpha1api.GetUserResponse{
- Roles: []string{
- "role_2",
- "role_1",
- "",
- },
- Username: ("username"),
- Host: ("host"),
- Port: (int32(1234)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &v3alpha1api.GetUserResponse{
- Id: int64(1),
- Roles: []string{},
- Username: "",
- Host: "",
- Port: int32(2123456789),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringValue(""),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &v3alpha1api.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &v3alpha1api.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *resourceModel
- inputRoles []string
- expected *v3alpha1api.CreateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &resourceModel{},
- []string{},
- &v3alpha1api.CreateUserRequestPayload{
- Roles: []string{},
- Username: "",
- },
- true,
- },
- {
- "default_values",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{
- "role_1",
- "role_2",
- },
- &v3alpha1api.CreateUserRequestPayload{
- Roles: []string{
- "role_1",
- "role_2",
- },
- Username: "username",
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &resourceModel{
- Username: types.StringValue(""),
- },
- []string{
- "",
- },
- &v3alpha1api.CreateUserRequestPayload{
- Roles: []string{
- "",
- },
- Username: "",
- },
- true,
- },
- {
- "nil_model",
- nil,
- []string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{},
- &v3alpha1api.CreateUserRequestPayload{
- Roles: []string{},
- Username: "username",
- },
- true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(tt.expected, output)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml
deleted file mode 100644
index 8ff346ab..00000000
--- a/stackit/internal/services/sqlserverflexalpha/user/planModifiers.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'user_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'username'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'roles'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'password'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'uri'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource.go b/stackit/internal/services/sqlserverflexalpha/user/resource.go
index efa2b57f..2d3978c4 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/resource.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/resource.go
@@ -2,63 +2,69 @@ package sqlserverflexalpha
import (
"context"
- _ "embed"
"errors"
"fmt"
"net/http"
- "slices"
"strconv"
"strings"
- "time"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
- sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/validate"
- sqlserverflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
)
+// Ensure the implementation satisfies the expected interfaces.
var (
- _ resource.Resource = &userResource{}
- _ resource.ResourceWithConfigure = &userResource{}
- _ resource.ResourceWithImportState = &userResource{}
- _ resource.ResourceWithModifyPlan = &userResource{}
- _ resource.ResourceWithIdentity = &userResource{}
- _ resource.ResourceWithValidateConfig = &userResource{}
+ _ resource.Resource = &userResource{}
+ _ resource.ResourceWithConfigure = &userResource{}
+ _ resource.ResourceWithImportState = &userResource{}
+ _ resource.ResourceWithModifyPlan = &userResource{}
)
+type Model struct {
+ Id types.String `tfsdk:"id"` // needed by TF
+ UserId types.Int64 `tfsdk:"user_id"`
+ InstanceId types.String `tfsdk:"instance_id"`
+ ProjectId types.String `tfsdk:"project_id"`
+ Username types.String `tfsdk:"username"`
+ Roles types.Set `tfsdk:"roles"`
+ Password types.String `tfsdk:"password"`
+ Host types.String `tfsdk:"host"`
+ Port types.Int64 `tfsdk:"port"`
+ Region types.String `tfsdk:"region"`
+ Status types.String `tfsdk:"status"`
+ DefaultDatabase types.String `tfsdk:"default_database"`
+}
+
+// NewUserResource is a helper function to simplify the provider implementation.
func NewUserResource() resource.Resource {
return &userResource{}
}
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexalphaResGen.UserModel
-
-// UserResourceIdentityModel describes the resource's identity attributes.
-type UserResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- UserID types.Int64 `tfsdk:"user_id"`
-}
-
+// userResource is the resource implementation.
type userResource struct {
client *sqlserverflexalpha.APIClient
providerData core.ProviderData
}
+// Metadata returns the resource type name.
func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
}
@@ -86,7 +92,7 @@ func (r *userResource) ModifyPlan(
req resource.ModifyPlanRequest,
resp *resource.ModifyPlanResponse,
) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
+ var configModel Model
// skip initial empty configuration to avoid follow-up errors
if req.Config.Raw.IsNull() {
return
@@ -96,7 +102,7 @@ func (r *userResource) ModifyPlan(
return
}
- var planModel resourceModel
+ var planModel Model
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
if resp.Diagnostics.HasError() {
return
@@ -113,91 +119,116 @@ func (r *userResource) ModifyPlan(
}
}
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
// Schema defines the schema for the resource.
-func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexalphaResGen.UserResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
+func (r *userResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+ descriptions := map[string]string{
+ "main": "SQLServer Flex user resource schema. Must have a `region` specified in the provider configuration.",
+ "id": "Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
+ "user_id": "User ID.",
+ "instance_id": "ID of the SQLServer Flex instance.",
+ "project_id": "STACKIT project ID to which the instance is associated.",
+ "username": "Username of the SQLServer Flex instance.",
+ "roles": "Database access levels for the user. The values for the default roles are: `##STACKIT_DatabaseManager##`, `##STACKIT_LoginManager##`, `##STACKIT_ProcessManager##`, `##STACKIT_ServerManager##`, `##STACKIT_SQLAgentManager##`, `##STACKIT_SQLAgentUser##`",
+ "password": "Password of the user account.",
+ "status": "Status of the user.",
+ "default_database": "Default database of the user.",
}
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-// IdentitySchema defines the schema for the resource's identity attributes.
-func (r *userResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- response *resource.IdentitySchemaResponse,
-) {
- response.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
+ resp.Schema = schema.Schema{
+ Description: descriptions["main"],
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: descriptions["id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
},
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
+ "user_id": schema.Int64Attribute{
+ Description: descriptions["user_id"],
+ Computed: true,
+ PlanModifiers: []planmodifier.Int64{
+ int64planmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.Int64{},
},
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
+ "instance_id": schema.StringAttribute{
+ Description: descriptions["instance_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
},
- "user_id": identityschema.Int64Attribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
+ "project_id": schema.StringAttribute{
+ Description: descriptions["project_id"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ Validators: []validator.String{
+ validate.UUID(),
+ validate.NoSeparator(),
+ },
+ },
+ "username": schema.StringAttribute{
+ Description: descriptions["username"],
+ Required: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "roles": schema.SetAttribute{
+ Description: descriptions["roles"],
+ ElementType: types.StringType,
+ Required: true,
+ PlanModifiers: []planmodifier.Set{
+ setplanmodifier.RequiresReplace(),
+ },
+ },
+ "password": schema.StringAttribute{
+ Description: descriptions["password"],
+ Computed: true,
+ Sensitive: true,
+ },
+ "host": schema.StringAttribute{
+ Computed: true,
+ },
+ "port": schema.Int64Attribute{
+ Computed: true,
+ },
+ "region": schema.StringAttribute{
+ Optional: true,
+ // must be computed to allow for storing the override value from the provider
+ Computed: true,
+ Description: descriptions["region"],
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ },
+ "status": schema.StringAttribute{
+ Computed: true,
+ },
+ "default_database": schema.StringAttribute{
+ Computed: true,
},
},
}
}
-func (r *userResource) ValidateConfig(
- ctx context.Context,
- req resource.ValidateConfigRequest,
- resp *resource.ValidateConfigResponse,
-) {
- var data resourceModel
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var roles []string
- diags := data.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return
- }
-
- var resRoles []string
- for _, role := range roles {
- if slices.Contains(resRoles, role) {
- resp.Diagnostics.AddAttributeError(
- path.Root("roles"),
- "Attribute Configuration Error",
- "defined roles MUST NOT contain duplicates",
- )
- return
- }
- resRoles = append(resRoles, role)
- }
-}
-
// Create creates the resource and sets the initial Terraform state.
func (r *userResource) Create(
ctx context.Context,
req resource.CreateRequest,
resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -206,23 +237,21 @@ func (r *userResource) Create(
ctx = core.InitProviderContext(ctx)
- projectID := model.ProjectId.ValueString()
- instanceID := model.InstanceId.ValueString()
+ projectId := model.ProjectId.ValueString()
+ instanceId := model.InstanceId.ValueString()
region := model.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectID)
- ctx = tflog.SetField(ctx, "instance_id", instanceID)
+ ctx = tflog.SetField(ctx, "project_id", projectId)
+ ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region)
- var roles []string
+ var roles []sqlserverflexalpha.UserRole
if !model.Roles.IsNull() && !model.Roles.IsUnknown() {
diags = model.Roles.ElementsAs(ctx, &roles, false)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
return
}
-
- slices.Sort(roles)
}
// Generate API request body from model
@@ -232,11 +261,11 @@ func (r *userResource) Create(
return
}
// Create new user
- userResp, err := r.client.DefaultAPI.CreateUserRequest(
+ userResp, err := r.client.CreateUserRequest(
ctx,
- projectID,
+ projectId,
region,
- instanceID,
+ instanceId,
).CreateUserRequestPayload(*payload).Execute()
if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
@@ -245,7 +274,7 @@ func (r *userResource) Create(
ctx = core.LogResponse(ctx)
- if userResp == nil || userResp.Id == 0 {
+ if userResp == nil || userResp.Id == nil || *userResp.Id == 0 {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
@@ -254,68 +283,11 @@ func (r *userResource) Create(
)
return
}
-
- userId := userResp.Id
+ userId := *userResp.Id
ctx = tflog.SetField(ctx, "user_id", userId)
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectID),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceID),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- err = mapFieldsCreate(userResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- waitResp, err := sqlserverflexalphaWait.CreateUserWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectID,
- instanceID,
- region,
- userId,
- ).SetSleepBeforeWait(
- 90 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
-
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
// Map response body to schema
- err = mapFields(waitResp, &model, region)
+ err = mapFieldsCreate(userResp, &model, region)
if err != nil {
core.LogAndAddError(
ctx,
@@ -340,7 +312,7 @@ func (r *userResource) Read(
req resource.ReadRequest,
resp *resource.ReadResponse,
) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
+ var model Model
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -358,7 +330,7 @@ func (r *userResource) Read(
ctx = tflog.SetField(ctx, "user_id", userId)
ctx = tflog.SetField(ctx, "region", region)
- recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
+ recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
var oapiErr *oapierror.GenericOpenAPIError
ok := errors.As(
@@ -388,18 +360,6 @@ func (r *userResource) Read(
return
}
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
// Set refreshed state
diags = resp.State.Set(ctx, model)
resp.Diagnostics.Append(diags...)
@@ -426,7 +386,7 @@ func (r *userResource) Delete(
resp *resource.DeleteResponse,
) { // nolint:gocritic // function signature required by Terraform
// Retrieve values from plan
- var model resourceModel
+ var model Model
diags := req.State.Get(ctx, &model)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
@@ -445,40 +405,14 @@ func (r *userResource) Delete(
ctx = tflog.SetField(ctx, "region", region)
// Delete existing record set
- // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- err := r.client.DefaultAPI.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
+ err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- // TODO err handling
- return
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- resp.State.RemoveResource(ctx)
- return
- // case http.StatusInternalServerError:
- // tflog.Warn(ctx, "[delete user] Wait handler got error 500")
- // return false, nil, nil
- default:
- // TODO err handling
- return
- }
- }
- // Delete existing record set
- _, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client.DefaultAPI, projectId, region, instanceId, userId).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
+ core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
return
}
ctx = core.LogResponse(ctx)
- resp.State.RemoveResource(ctx)
-
tflog.Info(ctx, "SQLServer Flex user deleted")
}
@@ -489,61 +423,23 @@ func (r *userResource) ImportState(
req resource.ImportStateRequest,
resp *resource.ImportStateResponse,
) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
- return
- }
-
- userId, err := strconv.ParseInt(idParts[3], 10, 64)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- tflog.Info(ctx, "SQLServer Flex user state imported")
-
+ idParts := strings.Split(req.ID, core.Separator)
+ if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
+ core.LogAndAddError(
+ ctx, &resp.Diagnostics,
+ "Error importing user",
+ fmt.Sprintf(
+ "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
+ req.ID,
+ ),
+ )
return
}
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData UserResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- userId := identityData.UserID.ValueInt64()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), idParts[3])...)
core.LogAndAddWarning(
ctx,
&resp.Diagnostics,
@@ -552,3 +448,118 @@ func (r *userResource) ImportState(
)
tflog.Info(ctx, "SQLServer Flex user state imported")
}
+
+func mapFieldsCreate(userResp *sqlserverflexalpha.CreateUserResponse, model *Model, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ if user.Id == nil {
+ return fmt.Errorf("user id not present")
+ }
+ userId := *user.Id
+ model.Id = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(),
+ region,
+ model.InstanceId.ValueString(),
+ strconv.FormatInt(userId, 10),
+ )
+ model.UserId = types.Int64Value(userId)
+ model.Username = types.StringPointerValue(user.Username)
+
+ if user.Password == nil {
+ return fmt.Errorf("user password not present")
+ }
+ model.Password = types.StringValue(*user.Password)
+
+ if user.Roles != nil {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
+ }
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = rolesSet
+ }
+
+ if model.Roles.IsNull() || model.Roles.IsUnknown() {
+ model.Roles = types.SetNull(types.StringType)
+ }
+
+ model.Host = types.StringPointerValue(user.Host)
+ model.Port = types.Int64PointerValue(user.Port)
+ model.Region = types.StringValue(region)
+ model.Status = types.StringPointerValue(user.Status)
+ model.DefaultDatabase = types.StringPointerValue(user.DefaultDatabase)
+
+ return nil
+}
+
+func mapFields(userResp *sqlserverflexalpha.GetUserResponse, model *Model, region string) error {
+ if userResp == nil {
+ return fmt.Errorf("response is nil")
+ }
+ if model == nil {
+ return fmt.Errorf("model input is nil")
+ }
+ user := userResp
+
+ var userId int64
+ if model.UserId.ValueInt64() != 0 {
+ userId = model.UserId.ValueInt64()
+ } else if user.Id != nil {
+ userId = *user.Id
+ } else {
+ return fmt.Errorf("user id not present")
+ }
+ model.Id = utils.BuildInternalTerraformId(
+ model.ProjectId.ValueString(),
+ region,
+ model.InstanceId.ValueString(),
+ strconv.FormatInt(userId, 10),
+ )
+ model.UserId = types.Int64Value(userId)
+ model.Username = types.StringPointerValue(user.Username)
+
+ if user.Roles != nil {
+ var roles []attr.Value
+ for _, role := range *user.Roles {
+ roles = append(roles, types.StringValue(string(role)))
+ }
+ rolesSet, diags := types.SetValue(types.StringType, roles)
+ if diags.HasError() {
+ return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
+ }
+ model.Roles = rolesSet
+ }
+
+ if model.Roles.IsNull() || model.Roles.IsUnknown() {
+ model.Roles = types.SetNull(types.StringType)
+ }
+
+ model.Host = types.StringPointerValue(user.Host)
+ model.Port = types.Int64PointerValue(user.Port)
+ model.Region = types.StringValue(region)
+ return nil
+}
+
+func toCreatePayload(
+ model *Model,
+ roles []sqlserverflexalpha.UserRole,
+) (*sqlserverflexalpha.CreateUserRequestPayload, error) {
+ if model == nil {
+ return nil, fmt.Errorf("nil model")
+ }
+
+ return &sqlserverflexalpha.CreateUserRequestPayload{
+ Username: conversion.StringValueToPointer(model.Username),
+ DefaultDatabase: conversion.StringValueToPointer(model.DefaultDatabase),
+ Roles: &roles,
+ }, nil
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/resource_test.go b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go
new file mode 100644
index 00000000..ad6bbf5a
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/user/resource_test.go
@@ -0,0 +1,385 @@
+package sqlserverflexalpha
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stackitcloud/stackit-sdk-go/core/utils"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+)
+
+func TestMapFieldsCreate(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *sqlserverflexalpha.CreateUserResponse
+ region string
+ expected Model
+ isValid bool
+ }{
+ {
+ "default_values",
+ &sqlserverflexalpha.CreateUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Password: utils.Ptr(""),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Password: types.StringValue(""),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &sqlserverflexalpha.CreateUserResponse{
+ Id: utils.Ptr(int64(2)),
+ Roles: &[]sqlserverflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Username: utils.Ptr("username"),
+ Password: utils.Ptr("password"),
+ Host: utils.Ptr("host"),
+ Port: utils.Ptr(int64(1234)),
+ Status: utils.Ptr("status"),
+ DefaultDatabase: utils.Ptr("default_db"),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,2"),
+ UserId: types.Int64Value(2),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringValue("username"),
+ Roles: types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ Password: types.StringValue("password"),
+ Host: types.StringValue("host"),
+ Port: types.Int64Value(1234),
+ Region: types.StringValue(testRegion),
+ Status: types.StringValue("status"),
+ DefaultDatabase: types.StringValue("default_db"),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &sqlserverflexalpha.CreateUserResponse{
+ Id: utils.Ptr(int64(3)),
+ Roles: &[]sqlserverflexalpha.UserRole{},
+ Username: nil,
+ Password: utils.Ptr(""),
+ Host: nil,
+ Port: utils.Ptr(int64(2123456789)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,3"),
+ UserId: types.Int64Value(3),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetValueMust(types.StringType, []attr.Value{}),
+ Password: types.StringValue(""),
+ Host: types.StringNull(),
+ Port: types.Int64Value(2123456789),
+ Region: types.StringValue(testRegion),
+ DefaultDatabase: types.StringNull(),
+ Status: types.StringNull(),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &sqlserverflexalpha.CreateUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &sqlserverflexalpha.CreateUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "no_password",
+ &sqlserverflexalpha.CreateUserResponse{
+ Id: utils.Ptr(int64(1)),
+ },
+ testRegion,
+ Model{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &Model{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ }
+ err := mapFieldsCreate(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestMapFields(t *testing.T) {
+ const testRegion = "region"
+ tests := []struct {
+ description string
+ input *sqlserverflexalpha.GetUserResponse
+ region string
+ expected Model
+ isValid bool
+ }{
+ {
+ "default_values",
+ &sqlserverflexalpha.GetUserResponse{},
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetNull(types.StringType),
+ Host: types.StringNull(),
+ Port: types.Int64Null(),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "simple_values",
+ &sqlserverflexalpha.GetUserResponse{
+ Roles: &[]sqlserverflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ "",
+ },
+ Username: utils.Ptr("username"),
+ Host: utils.Ptr("host"),
+ Port: utils.Ptr(int64(1234)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,2"),
+ UserId: types.Int64Value(2),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringValue("username"),
+ Roles: types.SetValueMust(
+ types.StringType, []attr.Value{
+ types.StringValue("role_1"),
+ types.StringValue("role_2"),
+ types.StringValue(""),
+ },
+ ),
+ Host: types.StringValue("host"),
+ Port: types.Int64Value(1234),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &sqlserverflexalpha.GetUserResponse{
+ Id: utils.Ptr(int64(1)),
+ Roles: &[]sqlserverflexalpha.UserRole{},
+ Username: nil,
+ Host: nil,
+ Port: utils.Ptr(int64(2123456789)),
+ },
+ testRegion,
+ Model{
+ Id: types.StringValue("pid,region,iid,1"),
+ UserId: types.Int64Value(1),
+ InstanceId: types.StringValue("iid"),
+ ProjectId: types.StringValue("pid"),
+ Username: types.StringNull(),
+ Roles: types.SetValueMust(types.StringType, []attr.Value{}),
+ Host: types.StringNull(),
+ Port: types.Int64Value(2123456789),
+ Region: types.StringValue(testRegion),
+ },
+ true,
+ },
+ {
+ "nil_response",
+ nil,
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "nil_response_2",
+ &sqlserverflexalpha.GetUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ {
+ "no_resource_id",
+ &sqlserverflexalpha.GetUserResponse{},
+ testRegion,
+ Model{},
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ state := &Model{
+ ProjectId: tt.expected.ProjectId,
+ InstanceId: tt.expected.InstanceId,
+ UserId: tt.expected.UserId,
+ }
+ err := mapFields(tt.input, state, tt.region)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(state, &tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
+
+func TestToCreatePayload(t *testing.T) {
+ tests := []struct {
+ description string
+ input *Model
+ inputRoles []sqlserverflexalpha.UserRole
+ expected *sqlserverflexalpha.CreateUserRequestPayload
+ isValid bool
+ }{
+ {
+ "default_values",
+ &Model{},
+ []sqlserverflexalpha.UserRole{},
+ &sqlserverflexalpha.CreateUserRequestPayload{
+ Roles: &[]sqlserverflexalpha.UserRole{},
+ Username: nil,
+ },
+ true,
+ },
+ {
+ "default_values",
+ &Model{
+ Username: types.StringValue("username"),
+ },
+ []sqlserverflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ },
+ &sqlserverflexalpha.CreateUserRequestPayload{
+ Roles: &[]sqlserverflexalpha.UserRole{
+ "role_1",
+ "role_2",
+ },
+ Username: utils.Ptr("username"),
+ },
+ true,
+ },
+ {
+ "null_fields_and_int_conversions",
+ &Model{
+ Username: types.StringNull(),
+ },
+ []sqlserverflexalpha.UserRole{
+ "",
+ },
+ &sqlserverflexalpha.CreateUserRequestPayload{
+ Roles: &[]sqlserverflexalpha.UserRole{
+ "",
+ },
+ Username: nil,
+ },
+ true,
+ },
+ {
+ "nil_model",
+ nil,
+ []sqlserverflexalpha.UserRole{},
+ nil,
+ false,
+ },
+ {
+ "nil_roles",
+ &Model{
+ Username: types.StringValue("username"),
+ },
+ []sqlserverflexalpha.UserRole{},
+ &sqlserverflexalpha.CreateUserRequestPayload{
+ Roles: &[]sqlserverflexalpha.UserRole{},
+ Username: utils.Ptr("username"),
+ },
+ true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(
+ tt.description, func(t *testing.T) {
+ output, err := toCreatePayload(tt.input, tt.inputRoles)
+ if !tt.isValid && err == nil {
+ t.Fatalf("Should have failed")
+ }
+ if tt.isValid && err != nil {
+ t.Fatalf("Should not have failed: %v", err)
+ }
+ if tt.isValid {
+ diff := cmp.Diff(output, tt.expected)
+ if diff != "" {
+ t.Fatalf("Data does not match: %s", diff)
+ }
+ }
+ },
+ )
+ }
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go b/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go
index b316b020..2b456e79 100644
--- a/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go
+++ b/stackit/internal/services/sqlserverflexalpha/user/resources_gen/user_resource_gen.go
@@ -66,8 +66,8 @@ func UserResourceSchema(ctx context.Context) schema.Schema {
"roles": schema.ListAttribute{
ElementType: types.StringType,
Required: true,
- Description: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.",
- MarkdownDescription: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.",
+ Description: "A list containing the user roles for the instance.",
+ MarkdownDescription: "A list containing the user roles for the instance.",
},
"status": schema.StringAttribute{
Computed: true,
diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util.go b/stackit/internal/services/sqlserverflexalpha/utils/util.go
index 86dc18ac..4180955b 100644
--- a/stackit/internal/services/sqlserverflexalpha/utils/util.go
+++ b/stackit/internal/services/sqlserverflexalpha/utils/util.go
@@ -1,14 +1,15 @@
+// Copyright (c) STACKIT
+
package utils
import (
"context"
"fmt"
- sqlserverflex "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/stackitcloud/stackit-sdk-go/core/config"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
diff --git a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
index 43ec71d1..7818408d 100644
--- a/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
+++ b/stackit/internal/services/sqlserverflexalpha/utils/util_test.go
@@ -1,3 +1,5 @@
+// Copyright (c) STACKIT
+
package utils
import (
@@ -9,8 +11,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
"github.com/stackitcloud/stackit-sdk-go/core/config"
-
- sqlserverflex "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
diff --git a/stackit/internal/services/sqlserverflexalpha/version/datasource.go b/stackit/internal/services/sqlserverflexalpha/version/datasource.go
new file mode 100644
index 00000000..707ba2f9
--- /dev/null
+++ b/stackit/internal/services/sqlserverflexalpha/version/datasource.go
@@ -0,0 +1,71 @@
+package sqlserverflexalpha
+
+import (
+ "context"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
+ sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
+
+ sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/version/datasources_gen"
+)
+
+var (
+ _ datasource.DataSource = (*versionDataSource)(nil)
+ _ datasource.DataSourceWithConfigure = (*versionDataSource)(nil)
+)
+
+func NewVersionDataSource() datasource.DataSource {
+ return &versionDataSource{}
+}
+
+type versionDataSource struct {
+ client *sqlserverflexalpha.APIClient
+ providerData core.ProviderData
+}
+
+func (d *versionDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_version"
+}
+
+func (d *versionDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = sqlserverflexalphaGen.VersionDataSourceSchema(ctx)
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *versionDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ var ok bool
+ d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
+ if !ok {
+ return
+ }
+
+ apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+ d.client = apiClient
+ tflog.Info(ctx, "SQL SERVER Flex version client configured")
+}
+
+func (d *versionDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data sqlserverflexalphaGen.VersionModel
+
+ // Read Terraform configuration data into the model
+ resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Todo: Read API call logic
+
+ // Example data value setting
+ // data.Id = types.StringValue("example-id")
+
+ // Save data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go
similarity index 100%
rename from stackit/internal/services/sqlserverflexalpha/versions/datasources_gen/version_data_source_gen.go
rename to stackit/internal/services/sqlserverflexalpha/version/datasources_gen/version_data_source_gen.go
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasource.go b/stackit/internal/services/sqlserverflexbeta/database/datasource.go
deleted file mode 100644
index dae9b2af..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/datasource.go
+++ /dev/null
@@ -1,175 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen"
-)
-
-var _ datasource.DataSource = (*databaseDataSource)(nil)
-
-const errorPrefix = "[Sqlserverflexbeta - Database]"
-
-func NewDatabaseDataSource() datasource.DataSource {
- return &databaseDataSource{}
-}
-
-type dataSourceModel struct {
- sqlserverflexbetaGen.DatabaseModel
- TerraformId types.String `tfsdk:"id"`
-}
-
-type databaseDataSource struct {
- client *sqlserverflexbetaPkg.APIClient
- providerData core.ProviderData
-}
-
-func (d *databaseDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
-}
-
-func (d *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.DatabaseDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *databaseDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := sqlserverflexbetaPkg.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- // Extract identifiers from the plan
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.DatabaseName.ValueString()
-
- databaseResp, err := d.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
- // Map response body to schema and populate Computed attribute values
- err = mapFields(databaseResp, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "SQL Server Flex beta database read")
-}
-
-// handleReadError centralizes API error handling for the Read operation.
-func handleReadError(ctx context.Context, diags *diag.Diagnostics, err error, projectId, instanceId string) {
- utils.LogError(
- ctx,
- diags,
- err,
- "Reading database",
- fmt.Sprintf(
- "Could not retrieve database for instance %q in project %q.",
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusBadRequest: fmt.Sprintf(
- "Invalid request parameters for project %q and instance %q.",
- projectId,
- instanceId,
- ),
- http.StatusNotFound: fmt.Sprintf(
- "Database, instance %q, or project %q not found.",
- instanceId,
- projectId,
- ),
- http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
- },
- )
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go
deleted file mode 100644
index 92b1064e..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/database_data_source_gen.go
+++ /dev/null
@@ -1,81 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "collation_name": schema.StringAttribute{
- Computed: true,
- Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
- MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
- },
- "compatibility_level": schema.Int64Attribute{
- Computed: true,
- Description: "CompatibilityLevel of the Database.",
- MarkdownDescription: "CompatibilityLevel of the Database.",
- },
- "database_name": schema.StringAttribute{
- Required: true,
- Description: "The name of the database.",
- MarkdownDescription: "The name of the database.",
- },
- "tf_original_api_id": schema.Int64Attribute{
- Computed: true,
- Description: "The id of the database.",
- MarkdownDescription: "The id of the database.",
- },
- "instance_id": schema.StringAttribute{
- Required: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the database.",
- MarkdownDescription: "The name of the database.",
- },
- "owner": schema.StringAttribute{
- Computed: true,
- Description: "The owner of the database.",
- MarkdownDescription: "The owner of the database.",
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- },
- }
-}
-
-type DatabaseModel struct {
- CollationName types.String `tfsdk:"collation_name"`
- CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"`
- DatabaseName types.String `tfsdk:"database_name"`
- Id types.Int64 `tfsdk:"tf_original_api_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- Owner types.String `tfsdk:"owner"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go
deleted file mode 100644
index 71ec8fb4..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/datasources_gen/databases_data_source_gen.go
+++ /dev/null
@@ -1,1180 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func DatabasesDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "databases": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "created": schema.StringAttribute{
- Computed: true,
- Description: "The date when the database was created in RFC3339 format.",
- MarkdownDescription: "The date when the database was created in RFC3339 format.",
- },
- "id": schema.Int64Attribute{
- Computed: true,
- Description: "The id of the database.",
- MarkdownDescription: "The id of the database.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the database.",
- MarkdownDescription: "The name of the database.",
- },
- "owner": schema.StringAttribute{
- Computed: true,
- Description: "The owner of the database.",
- MarkdownDescription: "The owner of the database.",
- },
- },
- CustomType: DatabasesType{
- ObjectType: types.ObjectType{
- AttrTypes: DatabasesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "A list containing all databases for the instance.",
- MarkdownDescription: "A list containing all databases for the instance.",
- },
- "instance_id": schema.StringAttribute{
- Required: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the databases to be returned on each page.",
- MarkdownDescription: "Sorting of the databases to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "created_at.desc",
- "created_at.asc",
- "database_id.desc",
- "database_id.asc",
- "database_name.desc",
- "database_name.asc",
- "database_owner.desc",
- "database_owner.asc",
- "index.asc",
- "index.desc",
- ),
- },
- },
- },
- }
-}
-
-type DatabasesModel struct {
- Databases types.List `tfsdk:"databases"`
- InstanceId types.String `tfsdk:"instance_id"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = DatabasesType{}
-
-type DatabasesType struct {
- basetypes.ObjectType
-}
-
-func (t DatabasesType) Equal(o attr.Type) bool {
- other, ok := o.(DatabasesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t DatabasesType) String() string {
- return "DatabasesType"
-}
-
-func (t DatabasesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- createdAttribute, ok := attributes["created"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `created is missing from object`)
-
- return nil, diags
- }
-
- createdVal, ok := createdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
- }
-
- nameAttribute, ok := attributes["name"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `name is missing from object`)
-
- return nil, diags
- }
-
- nameVal, ok := nameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
- }
-
- ownerAttribute, ok := attributes["owner"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `owner is missing from object`)
-
- return nil, diags
- }
-
- ownerVal, ok := ownerAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`owner expected to be basetypes.StringValue, was: %T`, ownerAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return DatabasesValue{
- Created: createdVal,
- Id: idVal,
- Name: nameVal,
- Owner: ownerVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewDatabasesValueNull() DatabasesValue {
- return DatabasesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewDatabasesValueUnknown() DatabasesValue {
- return DatabasesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewDatabasesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (DatabasesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing DatabasesValue Attribute Value",
- "While creating a DatabasesValue value, a missing attribute value was detected. "+
- "A DatabasesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("DatabasesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid DatabasesValue Attribute Type",
- "While creating a DatabasesValue value, an invalid attribute value was detected. "+
- "A DatabasesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("DatabasesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("DatabasesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra DatabasesValue Attribute Value",
- "While creating a DatabasesValue value, an extra attribute value was detected. "+
- "A DatabasesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra DatabasesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewDatabasesValueUnknown(), diags
- }
-
- createdAttribute, ok := attributes["created"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `created is missing from object`)
-
- return NewDatabasesValueUnknown(), diags
- }
-
- createdVal, ok := createdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`created expected to be basetypes.StringValue, was: %T`, createdAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewDatabasesValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
- }
-
- nameAttribute, ok := attributes["name"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `name is missing from object`)
-
- return NewDatabasesValueUnknown(), diags
- }
-
- nameVal, ok := nameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
- }
-
- ownerAttribute, ok := attributes["owner"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `owner is missing from object`)
-
- return NewDatabasesValueUnknown(), diags
- }
-
- ownerVal, ok := ownerAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`owner expected to be basetypes.StringValue, was: %T`, ownerAttribute))
- }
-
- if diags.HasError() {
- return NewDatabasesValueUnknown(), diags
- }
-
- return DatabasesValue{
- Created: createdVal,
- Id: idVal,
- Name: nameVal,
- Owner: ownerVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewDatabasesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) DatabasesValue {
- object, diags := NewDatabasesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewDatabasesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t DatabasesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewDatabasesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewDatabasesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewDatabasesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewDatabasesValueMust(DatabasesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t DatabasesType) ValueType(ctx context.Context) attr.Value {
- return DatabasesValue{}
-}
-
-var _ basetypes.ObjectValuable = DatabasesValue{}
-
-type DatabasesValue struct {
- Created basetypes.StringValue `tfsdk:"created"`
- Id basetypes.Int64Value `tfsdk:"id"`
- Name basetypes.StringValue `tfsdk:"name"`
- Owner basetypes.StringValue `tfsdk:"owner"`
- state attr.ValueState
-}
-
-func (v DatabasesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["created"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["owner"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.Created.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["created"] = val
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.Name.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["name"] = val
-
- val, err = v.Owner.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["owner"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v DatabasesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v DatabasesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v DatabasesValue) String() string {
- return "DatabasesValue"
-}
-
-func (v DatabasesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "created": basetypes.StringType{},
- "id": basetypes.Int64Type{},
- "name": basetypes.StringType{},
- "owner": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "created": v.Created,
- "id": v.Id,
- "name": v.Name,
- "owner": v.Owner,
- })
-
- return objVal, diags
-}
-
-func (v DatabasesValue) Equal(o attr.Value) bool {
- other, ok := o.(DatabasesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Created.Equal(other.Created) {
- return false
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.Name.Equal(other.Name) {
- return false
- }
-
- if !v.Owner.Equal(other.Owner) {
- return false
- }
-
- return true
-}
-
-func (v DatabasesValue) Type(ctx context.Context) attr.Type {
- return DatabasesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v DatabasesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "created": basetypes.StringType{},
- "id": basetypes.Int64Type{},
- "name": basetypes.StringType{},
- "owner": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper.go b/stackit/internal/services/sqlserverflexbeta/database/mapper.go
deleted file mode 100644
index 991fad58..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/mapper.go
+++ /dev/null
@@ -1,105 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "fmt"
-
- "github.com/hashicorp/terraform-plugin-framework/types"
- utils2 "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflexbeta "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
-func mapFields(source *sqlserverflexbeta.GetDatabaseResponse, model *dataSourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model given is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != 0 {
- databaseId = source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(source.GetOwner())
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
- model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel()))
- model.CollationName = types.StringValue(source.GetCollationName())
-
- model.TerraformId = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(),
- region,
- model.InstanceId.ValueString(),
- model.DatabaseName.ValueString(),
- )
-
- return nil
-}
-
-// mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource.
-func mapResourceFields(source *sqlserverflexbeta.GetDatabaseResponse, model *resourceModel, region string) error {
- if source == nil {
- return fmt.Errorf("response is nil")
- }
- if source.Id == 0 {
- return fmt.Errorf("id not present")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
-
- var databaseId int64
- if model.Id.ValueInt64() != 0 {
- databaseId = model.Id.ValueInt64()
- } else if source.Id != 0 {
- databaseId = source.Id
- } else {
- return fmt.Errorf("database id not present")
- }
-
- model.Id = types.Int64Value(databaseId)
- model.DatabaseName = types.StringValue(source.GetName())
- model.Name = types.StringValue(source.GetName())
- model.Owner = types.StringValue(source.GetOwner())
- model.Region = types.StringValue(region)
- model.ProjectId = types.StringValue(model.ProjectId.ValueString())
- model.InstanceId = types.StringValue(model.InstanceId.ValueString())
-
- model.Compatibility = types.Int64Value(int64(source.GetCompatibilityLevel()))
- model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel()))
-
- model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
- model.CollationName = types.StringValue(source.GetCollationName())
-
- return nil
-}
-
-// toCreatePayload converts the resource model to an API create payload.
-func toCreatePayload(model *resourceModel) (*sqlserverflexbeta.CreateDatabaseRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- return &sqlserverflexbeta.CreateDatabaseRequestPayload{
- Name: model.Name.ValueString(),
- Owner: model.Owner.ValueString(),
- Collation: model.Collation.ValueStringPointer(),
- Compatibility: utils2.Ptr(int32(model.Compatibility.ValueInt64())), //nolint:gosec // TODO
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go
deleted file mode 100644
index 2fad7615..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/mapper_test.go
+++ /dev/null
@@ -1,233 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/datasources_gen"
-)
-
-func TestMapFields(t *testing.T) {
- type given struct {
- source *v3beta1api.GetDatabaseResponse
- model *dataSourceModel
- region string
- }
- type expected struct {
- model *dataSourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &v3beta1api.GetDatabaseResponse{
- Id: int64(1),
- Name: "my-db",
- CollationName: "collation",
- CompatibilityLevel: int32(150),
- Owner: "my-owner",
- },
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- },
- region: "eu01",
- },
- expected: expected{
- model: &dataSourceModel{
- DatabaseModel: datasource.DatabaseModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- DatabaseName: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- Region: types.StringValue("eu01"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- CompatibilityLevel: types.Int64Value(150),
- CollationName: types.StringValue("collation"),
- },
- TerraformId: types.StringValue("my-project,eu01,my-instance,my-db"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil source ID",
- given: given{
- source: &v3beta1api.GetDatabaseResponse{Id: 0},
- model: &dataSourceModel{},
- },
- expected: expected{err: true},
- },
- {
- name: "should fail on nil model",
- given: given{
- source: &v3beta1api.GetDatabaseResponse{Id: int64(1)},
- model: nil,
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapResourceFields(t *testing.T) {
- type given struct {
- source *v3beta1api.GetDatabaseResponse
- model *resourceModel
- region string
- }
- type expected struct {
- model *resourceModel
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should map fields correctly",
- given: given{
- source: &v3beta1api.GetDatabaseResponse{
- Id: (int64(1)),
- Name: ("my-db"),
- Owner: ("my-owner"),
- },
- model: &resourceModel{
- ProjectId: types.StringValue("my-project"),
- InstanceId: types.StringValue("my-instance"),
- },
- region: "eu01",
- },
- expected: expected{
- model: &resourceModel{
- Id: types.Int64Value(1),
- Name: types.StringValue("my-db"),
- Compatibility: types.Int64Value(0),
- CompatibilityLevel: types.Int64Value(0),
- Collation: types.StringValue(""),
- CollationName: types.StringValue(""),
- DatabaseName: types.StringValue("my-db"),
- InstanceId: types.StringValue("my-instance"),
- ProjectId: types.StringValue("my-project"),
- Region: types.StringValue("eu01"),
- Owner: types.StringValue("my-owner"),
- },
- },
- },
- {
- name: "should fail on nil source",
- given: given{
- source: nil,
- model: &resourceModel{},
- },
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := mapResourceFields(tc.given.source, tc.given.model, tc.given.region)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.model, tc.given.model); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- type given struct {
- model *resourceModel
- }
- type expected struct {
- payload *v3beta1api.CreateDatabaseRequestPayload
- err bool
- }
-
- testcases := []struct {
- name string
- given given
- expected expected
- }{
- {
- name: "should convert model to payload",
- given: given{
- model: &resourceModel{
- Name: types.StringValue("my-db"),
- Owner: types.StringValue("my-owner"),
- },
- },
- expected: expected{
- payload: &v3beta1api.CreateDatabaseRequestPayload{
- Name: "my-db",
- Owner: "my-owner",
- Compatibility: utils.Ptr(int32(0)),
- },
- },
- },
- {
- name: "should fail on nil model",
- given: given{model: nil},
- expected: expected{err: true},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- actual, err := toCreatePayload(tc.given.model)
- if (err != nil) != tc.expected.err {
- t.Fatalf("expected error: %v, got: %v", tc.expected.err, err)
- }
- if err == nil {
- if diff := cmp.Diff(tc.expected.payload, actual); diff != "" {
- t.Errorf("payload mismatch (-want +got):\n%s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml
deleted file mode 100644
index 08d7e6cf..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/planModifiers.yaml
+++ /dev/null
@@ -1,56 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'collation'
- modifiers:
- - 'RequiresReplace'
-
- - name: 'owner'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'database_name'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'collation_name'
- modifiers:
- - 'RequiresReplace'
- - 'UseStateForUnknown'
-
- - name: 'compatibility'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'compatibility_level'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
diff --git a/stackit/internal/services/sqlserverflexbeta/database/resource.go b/stackit/internal/services/sqlserverflexbeta/database/resource.go
deleted file mode 100644
index b8ed1cad..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/resource.go
+++ /dev/null
@@ -1,559 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- _ "embed"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- utils2 "github.com/stackitcloud/stackit-sdk-go/core/utils"
-
- sqlserverflexbeta "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database/resources_gen"
-)
-
-var (
- _ resource.Resource = &databaseResource{}
- _ resource.ResourceWithConfigure = &databaseResource{}
- _ resource.ResourceWithImportState = &databaseResource{}
- _ resource.ResourceWithModifyPlan = &databaseResource{}
- _ resource.ResourceWithIdentity = &databaseResource{}
-
- // Define errors
- errDatabaseNotFound = errors.New("database not found")
-)
-
-func NewDatabaseResource() resource.Resource {
- return &databaseResource{}
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexbetaResGen.DatabaseModel
-
-type databaseResource struct {
- client *sqlserverflexbeta.APIClient
- providerData core.ProviderData
-}
-
-type DatabaseResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- DatabaseName types.String `tfsdk:"database_name"`
-}
-
-func (r *databaseResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_database"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexbetaResGen.DatabaseResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *databaseResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "database_name": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *databaseResource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := sqlserverflexbeta.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "sqlserverflexbeta.Database client configured")
-}
-
-func (r *databaseResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- createErr := "DB create error"
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- databaseName := data.Name.ValueString()
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- payLoad := sqlserverflexbeta.CreateDatabaseRequestPayload{}
- if !data.Collation.IsNull() && !data.Collation.IsUnknown() {
- payLoad.Collation = data.Collation.ValueStringPointer()
- }
-
- if !data.Compatibility.IsNull() && !data.Compatibility.IsUnknown() {
- payLoad.Compatibility = utils2.Ptr(int32(data.Compatibility.ValueInt64())) //nolint:gosec // TODO
- }
-
- payLoad.Name = data.Name.ValueString()
- payLoad.Owner = data.Owner.ValueString()
-
- _, err := wait.WaitForUserWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectId,
- instanceId,
- region,
- data.Owner.ValueString(),
- ).
- SetSleepBeforeWait(10 * time.Second).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Calling API: %v", err),
- )
- return
- }
-
- createResp, err := r.client.DefaultAPI.CreateDatabaseRequest(ctx, projectId, region, instanceId).
- CreateDatabaseRequestPayload(payLoad).
- Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Calling API: %v", err),
- )
- return
- }
-
- if createResp == nil || createResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- "API didn't return database Id. A database might have been created",
- )
- return
- }
-
- databaseId := createResp.Id
-
- ctx = tflog.SetField(ctx, "database_id", databaseId)
-
- ctx = core.LogResponse(ctx)
-
- // Set data returned by API in identity
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- waitResp, err := wait.CreateDatabaseWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectId,
- instanceId,
- region,
- databaseName,
- ).SetSleepBeforeWait(
- 30 * time.Second,
- ).SetTimeout(
- 15 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- fmt.Sprintf("Database creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is nil",
- )
- return
- }
-
- if waitResp.Id != databaseId {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned id is different",
- )
- return
- }
-
- if waitResp.Owner != data.Owner.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned owner is different",
- )
- return
- }
-
- if waitResp.Name != data.Name.ValueString() {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- createErr,
- "Database creation waiting: returned name is different",
- )
- return
- }
-
- // Map response body to schema
- err = mapResourceFields(waitResp, &data, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set state to fully populated data
- resp.Diagnostics.Append(resp.State.Set(ctx, data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save data into Terraform state
-
- tflog.Info(ctx, "sqlserverflexbeta.Database created")
-}
-
-func (r *databaseResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- databaseResp, err := r.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResourceFields(databaseResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading database",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := DatabaseResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- DatabaseName: types.StringValue(databaseName),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexbeta.Database read")
-}
-
-func (r *databaseResource) Update(ctx context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) {
- // TODO: Check update api endpoint - not available at the moment, so return an error for now
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating database", "there is no way to update a database")
-}
-
-func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := model.ProjectId.ValueString()
- region := model.Region.ValueString()
- instanceId := model.InstanceId.ValueString()
- databaseName := model.DatabaseName.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "database_name", databaseName)
-
- // Delete existing record set
- err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting database",
- fmt.Sprintf(
- "Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
- ),
- )
- return
- }
-
- // TODO: wait handler??
-
- ctx = core.LogResponse(ctx)
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexbeta.Database deleted")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *databaseResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
-
- var configModel resourceModel
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var identityModel DatabaseResourceIdentityModel
- identityModel.ProjectID = planModel.ProjectId
- identityModel.Region = planModel.Region
-
- if !planModel.InstanceId.IsNull() && !planModel.InstanceId.IsUnknown() {
- identityModel.InstanceID = planModel.InstanceId
- }
-
- if !planModel.Name.IsNull() && !planModel.Name.IsUnknown() {
- identityModel.DatabaseName = planModel.Name
- }
-
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identityModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *databaseResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing database",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[database_name] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), idParts[3])...)
-
- var identityData DatabaseResourceIdentityModel
- identityData.ProjectID = types.StringValue(idParts[0])
- identityData.Region = types.StringValue(idParts[1])
- identityData.InstanceID = types.StringValue(idParts[2])
- identityData.DatabaseName = types.StringValue(idParts[3])
-
- resp.Diagnostics.Append(resp.Identity.Set(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "Sqlserverflexbeta database state imported")
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData DatabaseResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- databaseName := identityData.DatabaseName.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_name"), databaseName)...)
-
- tflog.Info(ctx, "Sqlserverflexbeta database state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go
deleted file mode 100644
index dccae0c4..00000000
--- a/stackit/internal/services/sqlserverflexbeta/database/resources_gen/database_resource_gen.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
-)
-
-func DatabaseResourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "collation": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
- MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
- },
- "collation_name": schema.StringAttribute{
- Computed: true,
- Description: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
- MarkdownDescription: "The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.",
- },
- "compatibility": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "CompatibilityLevel of the Database.",
- MarkdownDescription: "CompatibilityLevel of the Database.",
- },
- "compatibility_level": schema.Int64Attribute{
- Computed: true,
- Description: "CompatibilityLevel of the Database.",
- MarkdownDescription: "CompatibilityLevel of the Database.",
- },
- "database_name": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The name of the database.",
- MarkdownDescription: "The name of the database.",
- },
- "id": schema.Int64Attribute{
- Computed: true,
- Description: "The id of the database.",
- MarkdownDescription: "The id of the database.",
- },
- "instance_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "name": schema.StringAttribute{
- Required: true,
- Description: "The name of the database.",
- MarkdownDescription: "The name of the database.",
- },
- "owner": schema.StringAttribute{
- Required: true,
- Description: "The owner of the database.",
- MarkdownDescription: "The owner of the database.",
- },
- "project_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- },
- }
-}
-
-type DatabaseModel struct {
- Collation types.String `tfsdk:"collation"`
- CollationName types.String `tfsdk:"collation_name"`
- Compatibility types.Int64 `tfsdk:"compatibility"`
- CompatibilityLevel types.Int64 `tfsdk:"compatibility_level"`
- DatabaseName types.String `tfsdk:"database_name"`
- Id types.Int64 `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Name types.String `tfsdk:"name"`
- Owner types.String `tfsdk:"owner"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go
deleted file mode 100644
index 96ec3691..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/datasource.go
+++ /dev/null
@@ -1,356 +0,0 @@
-package sqlserverFlexBetaFlavor
-
-import (
- "context"
- "fmt"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen"
-)
-
-// Ensure the implementation satisfies the expected interfaces.
-var (
- _ datasource.DataSource = &flavorDataSource{}
- _ datasource.DataSourceWithConfigure = &flavorDataSource{}
-)
-
-type FlavorModel struct {
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- StorageClass types.String `tfsdk:"storage_class"`
- Cpu types.Int64 `tfsdk:"cpu"`
- Description types.String `tfsdk:"description"`
- Id types.String `tfsdk:"id"`
- FlavorId types.String `tfsdk:"flavor_id"`
- MaxGb types.Int64 `tfsdk:"max_gb"`
- Memory types.Int64 `tfsdk:"ram"`
- MinGb types.Int64 `tfsdk:"min_gb"`
- NodeType types.String `tfsdk:"node_type"`
- StorageClasses types.List `tfsdk:"storage_classes"`
-}
-
-// NewFlavorDataSource is a helper function to simplify the provider implementation.
-func NewFlavorDataSource() datasource.DataSource {
- return &flavorDataSource{}
-}
-
-// flavorDataSource is the data source implementation.
-type flavorDataSource struct {
- client *v3beta1api.APIClient
- providerData core.ProviderData
-}
-
-// Metadata returns the data source type name.
-func (r *flavorDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavor"
-}
-
-// Configure adds the provider configured client to the data source.
-func (r *flavorDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(r.providerData.GetRegion()),
- )
- }
- apiClient, err := v3beta1api.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "SQL Server Flex instance client configured")
-}
-
-func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- Attributes: map[string]schema.Attribute{
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The project ID of the flavor.",
- MarkdownDescription: "The project ID of the flavor.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region of the flavor.",
- MarkdownDescription: "The region of the flavor.",
- },
- "cpu": schema.Int64Attribute{
- Required: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "ram": schema.Int64Attribute{
- Required: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "storage_class": schema.StringAttribute{
- Required: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "node_type": schema.StringAttribute{
- Required: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "flavor_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: sqlserverflexbetaGen.StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- //Attributes: map[string]schema.Attribute{
- // "project_id": schema.StringAttribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "region": schema.StringAttribute{
- // Required: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "cpu": schema.Int64Attribute{
- // Required: true,
- // Description: "The cpu count of the instance.",
- // MarkdownDescription: "The cpu count of the instance.",
- // },
- // "ram": schema.Int64Attribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "storage_class": schema.StringAttribute{
- // Required: true,
- // Description: "The memory of the instance in Gibibyte.",
- // MarkdownDescription: "The memory of the instance in Gibibyte.",
- // },
- // "description": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor description.",
- // MarkdownDescription: "The flavor description.",
- // },
- // "id": schema.StringAttribute{
- // Computed: true,
- // Description: "The terraform id of the instance flavor.",
- // MarkdownDescription: "The terraform id of the instance flavor.",
- // },
- // "flavor_id": schema.StringAttribute{
- // Computed: true,
- // Description: "The flavor id of the instance flavor.",
- // MarkdownDescription: "The flavor id of the instance flavor.",
- // },
- // "max_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- // },
- // "min_gb": schema.Int64Attribute{
- // Computed: true,
- // Description: "minimum storage which is required to order in Gigabyte.",
- // MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- // },
- // "node_type": schema.StringAttribute{
- // Required: true,
- // Description: "defines the nodeType it can be either single or replica",
- // MarkdownDescription: "defines the nodeType it can be either single or replica",
- // },
- // "storage_classes": schema.ListNestedAttribute{
- // Computed: true,
- // NestedObject: schema.NestedAttributeObject{
- // Attributes: map[string]schema.Attribute{
- // "class": schema.StringAttribute{
- // Computed: true,
- // },
- // "max_io_per_sec": schema.Int64Attribute{
- // Computed: true,
- // },
- // "max_through_in_mb": schema.Int64Attribute{
- // Computed: true,
- // },
- // },
- // CustomType: sqlserverflexalphaGen.StorageClassesType{
- // ObjectType: types.ObjectType{
- // AttrTypes: sqlserverflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
- // },
- // },
- // },
- // },
- // },
- }
-}
-
-func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model FlavorModel
- diags := req.Config.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
- return
- }
-
- var foundFlavors []v3beta1api.ListFlavors
- for _, flavor := range flavors {
- if model.Cpu.ValueInt64() != flavor.Cpu {
- continue
- }
- if model.Memory.ValueInt64() != flavor.Memory {
- continue
- }
- if model.NodeType.ValueString() != flavor.NodeType {
- continue
- }
- for _, sc := range flavor.StorageClasses {
- if model.StorageClass.ValueString() != sc.Class {
- continue
- }
- foundFlavors = append(foundFlavors, flavor)
- }
- }
- if len(foundFlavors) == 0 {
- resp.Diagnostics.AddError("get flavor", "could not find requested flavor")
- return
- }
- if len(foundFlavors) > 1 {
- resp.Diagnostics.AddError("get flavor", "found too many matching flavors")
- return
- }
-
- f := foundFlavors[0]
- model.Description = types.StringValue(f.Description)
- model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
- model.FlavorId = types.StringValue(f.Id)
- model.MaxGb = types.Int64Value(int64(f.MaxGB))
- model.MinGb = types.Int64Value(int64(f.MinGB))
-
- if f.StorageClasses == nil {
- model.StorageClasses = types.ListNull(sqlserverflexbetaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- })
- } else {
- var scList []attr.Value
- for _, sc := range f.StorageClasses {
- scList = append(
- scList,
- sqlserverflexbetaGen.NewStorageClassesValueMust(
- sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(sc.Class),
- "max_io_per_sec": types.Int64Value(int64(sc.MaxIoPerSec)),
- "max_through_in_mb": types.Int64Value(int64(sc.MaxThroughInMb)),
- },
- ),
- )
- }
- storageClassesList := types.ListValueMust(
- sqlserverflexbetaGen.StorageClassesType{
- ObjectType: basetypes.ObjectType{
- AttrTypes: sqlserverflexbetaGen.StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- scList,
- )
- model.StorageClasses = storageClassesList
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQL Server Flex flavors read")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go
deleted file mode 100644
index a766197e..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/datasources_gen/flavor_data_source_gen.go
+++ /dev/null
@@ -1,1909 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func FlavorDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "flavors": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "cpu": schema.Int64Attribute{
- Computed: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "memory": schema.Int64Attribute{
- Computed: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "node_type": schema.StringAttribute{
- Computed: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- CustomType: FlavorsType{
- ObjectType: types.ObjectType{
- AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of flavors available for the project.",
- MarkdownDescription: "List of flavors available for the project.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the flavors to be returned on each page.",
- MarkdownDescription: "Sorting of the flavors to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "index.desc",
- "index.asc",
- "cpu.desc",
- "cpu.asc",
- "flavor_description.asc",
- "flavor_description.desc",
- "id.desc",
- "id.asc",
- "size_max.desc",
- "size_max.asc",
- "ram.desc",
- "ram.asc",
- "size_min.desc",
- "size_min.asc",
- "storage_class.asc",
- "storage_class.desc",
- "node_type.asc",
- "node_type.desc",
- ),
- },
- },
- },
- }
-}
-
-type FlavorModel struct {
- Flavors types.List `tfsdk:"flavors"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = FlavorsType{}
-
-type FlavorsType struct {
- basetypes.ObjectType
-}
-
-func (t FlavorsType) Equal(o attr.Type) bool {
- other, ok := o.(FlavorsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t FlavorsType) String() string {
- return "FlavorsType"
-}
-
-func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return nil, diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return nil, diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return nil, diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return nil, diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return nil, diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return nil, diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return nil, diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueNull() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewFlavorsValueUnknown() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, a missing attribute value was detected. "+
- "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid FlavorsValue Attribute Type",
- "While creating a FlavorsValue value, an invalid attribute value was detected. "+
- "A FlavorsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, an extra attribute value was detected. "+
- "A FlavorsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
- object, diags := NewFlavorsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewFlavorsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewFlavorsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewFlavorsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
- return FlavorsValue{}
-}
-
-var _ basetypes.ObjectValuable = FlavorsValue{}
-
-type FlavorsValue struct {
- Cpu basetypes.Int64Value `tfsdk:"cpu"`
- Description basetypes.StringValue `tfsdk:"description"`
- Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
- Memory basetypes.Int64Value `tfsdk:"memory"`
- MinGb basetypes.Int64Value `tfsdk:"min_gb"`
- NodeType basetypes.StringValue `tfsdk:"node_type"`
- StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
- state attr.ValueState
-}
-
-func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 8)
-
- var val tftypes.Value
- var err error
-
- attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["storage_classes"] = basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- }.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 8)
-
- val, err = v.Cpu.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["cpu"] = val
-
- val, err = v.Description.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["description"] = val
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.MaxGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_gb"] = val
-
- val, err = v.Memory.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["memory"] = val
-
- val, err = v.MinGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["min_gb"] = val
-
- val, err = v.NodeType.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["node_type"] = val
-
- val, err = v.StorageClasses.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["storage_classes"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v FlavorsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v FlavorsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v FlavorsValue) String() string {
- return "FlavorsValue"
-}
-
-func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- storageClasses := types.ListValueMust(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- v.StorageClasses.Elements(),
- )
-
- if v.StorageClasses.IsNull() {
- storageClasses = types.ListNull(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- if v.StorageClasses.IsUnknown() {
- storageClasses = types.ListUnknown(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "cpu": v.Cpu,
- "description": v.Description,
- "id": v.Id,
- "max_gb": v.MaxGb,
- "memory": v.Memory,
- "min_gb": v.MinGb,
- "node_type": v.NodeType,
- "storage_classes": storageClasses,
- })
-
- return objVal, diags
-}
-
-func (v FlavorsValue) Equal(o attr.Value) bool {
- other, ok := o.(FlavorsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Cpu.Equal(other.Cpu) {
- return false
- }
-
- if !v.Description.Equal(other.Description) {
- return false
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.MaxGb.Equal(other.MaxGb) {
- return false
- }
-
- if !v.Memory.Equal(other.Memory) {
- return false
- }
-
- if !v.MinGb.Equal(other.MinGb) {
- return false
- }
-
- if !v.NodeType.Equal(other.NodeType) {
- return false
- }
-
- if !v.StorageClasses.Equal(other.StorageClasses) {
- return false
- }
-
- return true
-}
-
-func (v FlavorsValue) Type(ctx context.Context) attr.Type {
- return FlavorsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-}
-
-var _ basetypes.ObjectTypable = StorageClassesType{}
-
-type StorageClassesType struct {
- basetypes.ObjectType
-}
-
-func (t StorageClassesType) Equal(o attr.Type) bool {
- other, ok := o.(StorageClassesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageClassesType) String() string {
- return "StorageClassesType"
-}
-
-func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return nil, diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return nil, diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueNull() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageClassesValueUnknown() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, a missing attribute value was detected. "+
- "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageClassesValue Attribute Type",
- "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
- "A StorageClassesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, an extra attribute value was detected. "+
- "A StorageClassesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
- object, diags := NewStorageClassesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageClassesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageClassesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageClassesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
- return StorageClassesValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageClassesValue{}
-
-type StorageClassesValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
- state attr.ValueState
-}
-
-func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 3)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 3)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_io_per_sec"] = val
-
- val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_through_in_mb"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageClassesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageClassesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageClassesValue) String() string {
- return "StorageClassesValue"
-}
-
-func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "max_io_per_sec": v.MaxIoPerSec,
- "max_through_in_mb": v.MaxThroughInMb,
- })
-
- return objVal, diags
-}
-
-func (v StorageClassesValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageClassesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
- return false
- }
-
- if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
- return false
- }
-
- return true
-}
-
-func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
- return StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions.go
deleted file mode 100644
index a823e397..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/functions.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package sqlserverFlexBetaFlavor
-
-import (
- "context"
- "fmt"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-)
-
-type flavorsClientReader interface {
- GetFlavorsRequest(
- ctx context.Context,
- projectId, region string,
- ) v3beta1api.ApiGetFlavorsRequestRequest
-}
-
-func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
- []v3beta1api.ListFlavors,
- error,
-) {
- getAllFilter := func(_ v3beta1api.ListFlavors) bool { return true }
- flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
- if err != nil {
- return nil, err
- }
- return flavorList, nil
-}
-
-// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function.
-// Hint: The API does not have a GetFlavors endpoint, only ListFlavors
-func getFlavorsByFilter(
- ctx context.Context,
- client flavorsClientReader,
- projectId, region string,
- filter func(db v3beta1api.ListFlavors) bool,
-) ([]v3beta1api.ListFlavors, error) {
- if projectId == "" || region == "" {
- return nil, fmt.Errorf("listing v3beta1api flavors: projectId and region are required")
- }
-
- const pageSize = 25
-
- var result = make([]v3beta1api.ListFlavors, 0)
-
- for page := int64(1); ; page++ {
- res, err := client.GetFlavorsRequest(ctx, projectId, region).
- Page(page).Size(pageSize).Sort(v3beta1api.FLAVORSORT_INDEX_ASC).Execute()
- if err != nil {
- return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
- }
-
- // If the API returns no flavors, we have reached the end of the list.
- if len(res.Flavors) == 0 {
- break
- }
-
- for _, flavor := range res.Flavors {
- if filter(flavor) {
- result = append(result, flavor)
- }
- }
- }
-
- return result, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go b/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go
deleted file mode 100644
index 72143b7f..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavor/functions_test.go
+++ /dev/null
@@ -1,135 +0,0 @@
-package sqlserverFlexBetaFlavor
-
-// import (
-// "context"
-// "testing"
-//
-// "github.com/stackitcloud/stackit-sdk-go/core/utils"
-//
-// "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-//)
-//
-// type mockRequest struct {
-// executeFunc func() (*v3beta1api.GetFlavorsResponse, error)
-//}
-//
-// func (m *mockRequest) Page(_ int64) v3beta1api.ApiGetFlavorsRequestRequest { return m }
-// func (m *mockRequest) Size(_ int64) v3beta1api.ApiGetFlavorsRequestRequest { return m }
-// func (m *mockRequest) Sort(_ v3beta1api.FlavorSort) v3beta1api.ApiGetFlavorsRequestRequest {
-// return m
-//}
-// func (m *mockRequest) Execute() (*v3beta1api.GetFlavorsResponse, error) {
-// return m.executeFunc()
-//}
-//
-// type mockFlavorsClient struct {
-// executeRequest func() v3beta1api.ApiGetFlavorsRequestRequest
-//}
-//
-// func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) v3beta1api.ApiGetFlavorsRequestRequest {
-// return m.executeRequest()
-//}
-//
-// var mockResp = func(page int64) (*v3beta1api.GetFlavorsResponse, error) {
-// if page == 1 {
-// return &v3beta1api.GetFlavorsResponse{
-// Flavors: &[]v3beta1api.ListFlavors{
-// {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
-// {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
-// },
-// }, nil
-// }
-// if page == 2 {
-// return &v3beta1api.GetFlavorsResponse{
-// Flavors: &[]v3beta1api.ListFlavors{
-// {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
-// },
-// }, nil
-// }
-//
-// return &v3beta1api.GetFlavorsResponse{
-// Flavors: &[]v3beta1api.ListFlavors{},
-// }, nil
-//}
-//
-// func TestGetFlavorsByFilter(t *testing.T) {
-// tests := []struct {
-// description string
-// projectId string
-// region string
-// mockErr error
-// filter func(v3beta1api.ListFlavors) bool
-// wantCount int
-// wantErr bool
-// }{
-// {
-// description: "Success - Get all flavors (2 pages)",
-// projectId: "pid", region: "reg",
-// filter: func(_ v3beta1api.ListFlavors) bool { return true },
-// wantCount: 3,
-// wantErr: false,
-// },
-// {
-// description: "Success - Filter flavors by description",
-// projectId: "pid", region: "reg",
-// filter: func(f v3beta1api.ListFlavors) bool { return *f.Description == "first" },
-// wantCount: 1,
-// wantErr: false,
-// },
-// {
-// description: "Error - Missing parameters",
-// projectId: "", region: "reg",
-// wantErr: true,
-// },
-// }
-//
-// for _, tt := range tests {
-// t.Run(
-// tt.description, func(t *testing.T) {
-// var currentPage int64
-// client := &mockFlavorsClient{
-// executeRequest: func() v3beta1api.ApiGetFlavorsRequestRequest {
-// return &mockRequest{
-// executeFunc: func() (*v3beta1api.GetFlavorsResponse, error) {
-// currentPage++
-// return mockResp(currentPage)
-// },
-// }
-// },
-// }
-// actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
-//
-// if (err != nil) != tt.wantErr {
-// t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
-// return
-// }
-//
-// if !tt.wantErr && len(actual) != tt.wantCount {
-// t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
-// }
-// },
-// )
-// }
-//}
-//
-// func TestGetAllFlavors(t *testing.T) {
-// var currentPage int64
-// client := &mockFlavorsClient{
-// executeRequest: func() v3beta1api.ApiGetFlavorsRequestRequest {
-// return &mockRequest{
-// executeFunc: func() (*v3beta1api.GetFlavorsResponse, error) {
-// currentPage++
-// return mockResp(currentPage)
-// },
-// }
-// },
-// }
-//
-// res, err := getAllFlavors(context.Background(), client, "pid", "reg")
-// if err != nil {
-// t.Errorf("getAllFlavors() unexpected error: %v", err)
-// }
-// if len(res) != 3 {
-// t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
-// }
-//}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
deleted file mode 100644
index 94540f22..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavors/datasource.go
+++ /dev/null
@@ -1,156 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen"
-)
-
-var _ datasource.DataSource = (*flavorsDataSource)(nil)
-
-const errorPrefix = "[Sqlserverflexbeta - Flavors]"
-
-func NewFlavorsDataSource() datasource.DataSource {
- return &flavorsDataSource{}
-}
-
-type dataSourceModel struct {
- sqlserverflexbetaGen.FlavorsModel
- TerraformId types.String `tfsdk:"id"`
-}
-
-type flavorsDataSource struct {
- client *v3beta1api.APIClient
- providerData core.ProviderData
-}
-
-func (d *flavorsDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_flavors"
-}
-
-func (d *flavorsDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.FlavorsDataSourceSchema(ctx)
- resp.Schema.Attributes["id"] = schema.StringAttribute{
- Computed: true,
- Description: "The terraform internal identifier.",
- MarkdownDescription: "The terraform internal identifier.",
- }
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *flavorsDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := v3beta1api.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *flavorsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- // TODO: implement right identifier for flavors
- flavorsId := data.Flavors
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // TODO: implement needed fields
- ctx = tflog.SetField(ctx, "flavors_id", flavorsId)
-
- // TODO: refactor to correct implementation
- _, err := d.client.DefaultAPI.GetFlavorsRequest(ctx, projectId, region).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading flavors",
- fmt.Sprintf("flavors with ID %q does not exist in project %q.", flavorsId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // TODO: refactor to correct implementation of internal tf id
- data.TerraformId = utils.BuildInternalTerraformId(projectId, region)
-
- // TODO: fill remaining fields
- // data.Flavors = types.Sometype(apiResponse.GetFlavors())
- // data.Page = types.Sometype(apiResponse.GetPage())
- // data.Pagination = types.Sometype(apiResponse.GetPagination())
- // data.ProjectId = types.Sometype(apiResponse.GetProjectId())
- // data.Region = types.Sometype(apiResponse.GetRegion())
- // data.Size = types.Sometype(apiResponse.GetSize())
- // data.Sort = types.Sometype(apiResponse.GetSort())// Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, fmt.Sprintf("%s read successful", errorPrefix))
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go
deleted file mode 100644
index a9d35ba1..00000000
--- a/stackit/internal/services/sqlserverflexbeta/flavors/datasources_gen/flavors_data_source_gen.go
+++ /dev/null
@@ -1,1909 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "flavors": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "cpu": schema.Int64Attribute{
- Computed: true,
- Description: "The cpu count of the instance.",
- MarkdownDescription: "The cpu count of the instance.",
- },
- "description": schema.StringAttribute{
- Computed: true,
- Description: "The flavor description.",
- MarkdownDescription: "The flavor description.",
- },
- "tf_original_api_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "max_gb": schema.Int64Attribute{
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- "memory": schema.Int64Attribute{
- Computed: true,
- Description: "The memory of the instance in Gibibyte.",
- MarkdownDescription: "The memory of the instance in Gibibyte.",
- },
- "min_gb": schema.Int64Attribute{
- Computed: true,
- Description: "minimum storage which is required to order in Gigabyte.",
- MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
- },
- "node_type": schema.StringAttribute{
- Computed: true,
- Description: "defines the nodeType it can be either single or HA",
- MarkdownDescription: "defines the nodeType it can be either single or HA",
- },
- "storage_classes": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- },
- "max_io_per_sec": schema.Int64Attribute{
- Computed: true,
- },
- "max_through_in_mb": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: StorageClassesType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
- MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
- },
- },
- CustomType: FlavorsType{
- ObjectType: types.ObjectType{
- AttrTypes: FlavorsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of flavors available for the project.",
- MarkdownDescription: "List of flavors available for the project.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the flavors to be returned on each page.",
- MarkdownDescription: "Sorting of the flavors to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "index.desc",
- "index.asc",
- "cpu.desc",
- "cpu.asc",
- "flavor_description.asc",
- "flavor_description.desc",
- "id.desc",
- "id.asc",
- "size_max.desc",
- "size_max.asc",
- "ram.desc",
- "ram.asc",
- "size_min.desc",
- "size_min.asc",
- "storage_class.asc",
- "storage_class.desc",
- "node_type.asc",
- "node_type.desc",
- ),
- },
- },
- },
- }
-}
-
-type FlavorsModel struct {
- Flavors types.List `tfsdk:"flavors"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = FlavorsType{}
-
-type FlavorsType struct {
- basetypes.ObjectType
-}
-
-func (t FlavorsType) Equal(o attr.Type) bool {
- other, ok := o.(FlavorsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t FlavorsType) String() string {
- return "FlavorsType"
-}
-
-func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return nil, diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return nil, diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return nil, diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return nil, diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return nil, diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return nil, diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return nil, diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueNull() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewFlavorsValueUnknown() FlavorsValue {
- return FlavorsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (FlavorsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, a missing attribute value was detected. "+
- "A FlavorsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid FlavorsValue Attribute Type",
- "While creating a FlavorsValue value, an invalid attribute value was detected. "+
- "A FlavorsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("FlavorsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra FlavorsValue Attribute Value",
- "While creating a FlavorsValue value, an extra attribute value was detected. "+
- "A FlavorsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra FlavorsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuAttribute, ok := attributes["cpu"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `cpu is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
- }
-
- descriptionAttribute, ok := attributes["description"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `description is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- descriptionVal, ok := descriptionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`description expected to be basetypes.StringValue, was: %T`, descriptionAttribute))
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- maxGbAttribute, ok := attributes["max_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
- }
-
- memoryAttribute, ok := attributes["memory"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `memory is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
- }
-
- minGbAttribute, ok := attributes["min_gb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `min_gb is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
- }
-
- nodeTypeAttribute, ok := attributes["node_type"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `node_type is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- nodeTypeVal, ok := nodeTypeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`node_type expected to be basetypes.StringValue, was: %T`, nodeTypeAttribute))
- }
-
- storageClassesAttribute, ok := attributes["storage_classes"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `storage_classes is missing from object`)
-
- return NewFlavorsValueUnknown(), diags
- }
-
- storageClassesVal, ok := storageClassesAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`storage_classes expected to be basetypes.ListValue, was: %T`, storageClassesAttribute))
- }
-
- if diags.HasError() {
- return NewFlavorsValueUnknown(), diags
- }
-
- return FlavorsValue{
- Cpu: cpuVal,
- Description: descriptionVal,
- Id: idVal,
- MaxGb: maxGbVal,
- Memory: memoryVal,
- MinGb: minGbVal,
- NodeType: nodeTypeVal,
- StorageClasses: storageClassesVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewFlavorsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) FlavorsValue {
- object, diags := NewFlavorsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewFlavorsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t FlavorsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewFlavorsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewFlavorsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewFlavorsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewFlavorsValueMust(FlavorsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
- return FlavorsValue{}
-}
-
-var _ basetypes.ObjectValuable = FlavorsValue{}
-
-type FlavorsValue struct {
- Cpu basetypes.Int64Value `tfsdk:"cpu"`
- Description basetypes.StringValue `tfsdk:"description"`
- Id basetypes.StringValue `tfsdk:"id"`
- MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
- Memory basetypes.Int64Value `tfsdk:"memory"`
- MinGb basetypes.Int64Value `tfsdk:"min_gb"`
- NodeType basetypes.StringValue `tfsdk:"node_type"`
- StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
- state attr.ValueState
-}
-
-func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 8)
-
- var val tftypes.Value
- var err error
-
- attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["storage_classes"] = basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- }.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 8)
-
- val, err = v.Cpu.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["cpu"] = val
-
- val, err = v.Description.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["description"] = val
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.MaxGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_gb"] = val
-
- val, err = v.Memory.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["memory"] = val
-
- val, err = v.MinGb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["min_gb"] = val
-
- val, err = v.NodeType.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["node_type"] = val
-
- val, err = v.StorageClasses.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["storage_classes"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v FlavorsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v FlavorsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v FlavorsValue) String() string {
- return "FlavorsValue"
-}
-
-func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- storageClasses := types.ListValueMust(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- v.StorageClasses.Elements(),
- )
-
- if v.StorageClasses.IsNull() {
- storageClasses = types.ListNull(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- if v.StorageClasses.IsUnknown() {
- storageClasses = types.ListUnknown(
- StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: StorageClassesValue{}.AttributeTypes(ctx),
- },
- },
- )
- }
-
- attributeTypes := map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "cpu": v.Cpu,
- "description": v.Description,
- "id": v.Id,
- "max_gb": v.MaxGb,
- "memory": v.Memory,
- "min_gb": v.MinGb,
- "node_type": v.NodeType,
- "storage_classes": storageClasses,
- })
-
- return objVal, diags
-}
-
-func (v FlavorsValue) Equal(o attr.Value) bool {
- other, ok := o.(FlavorsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Cpu.Equal(other.Cpu) {
- return false
- }
-
- if !v.Description.Equal(other.Description) {
- return false
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.MaxGb.Equal(other.MaxGb) {
- return false
- }
-
- if !v.Memory.Equal(other.Memory) {
- return false
- }
-
- if !v.MinGb.Equal(other.MinGb) {
- return false
- }
-
- if !v.NodeType.Equal(other.NodeType) {
- return false
- }
-
- if !v.StorageClasses.Equal(other.StorageClasses) {
- return false
- }
-
- return true
-}
-
-func (v FlavorsValue) Type(ctx context.Context) attr.Type {
- return FlavorsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "cpu": basetypes.Int64Type{},
- "description": basetypes.StringType{},
- "id": basetypes.StringType{},
- "max_gb": basetypes.Int64Type{},
- "memory": basetypes.Int64Type{},
- "min_gb": basetypes.Int64Type{},
- "node_type": basetypes.StringType{},
- "storage_classes": basetypes.ListType{
- ElemType: StorageClassesValue{}.Type(ctx),
- },
- }
-}
-
-var _ basetypes.ObjectTypable = StorageClassesType{}
-
-type StorageClassesType struct {
- basetypes.ObjectType
-}
-
-func (t StorageClassesType) Equal(o attr.Type) bool {
- other, ok := o.(StorageClassesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageClassesType) String() string {
- return "StorageClassesType"
-}
-
-func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return nil, diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return nil, diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueNull() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageClassesValueUnknown() StorageClassesValue {
- return StorageClassesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageClassesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, a missing attribute value was detected. "+
- "A StorageClassesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageClassesValue Attribute Type",
- "While creating a StorageClassesValue value, an invalid attribute value was detected. "+
- "A StorageClassesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageClassesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageClassesValue Attribute Value",
- "While creating a StorageClassesValue value, an extra attribute value was detected. "+
- "A StorageClassesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageClassesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- maxIoPerSecAttribute, ok := attributes["max_io_per_sec"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_io_per_sec is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
- }
-
- maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `max_through_in_mb is missing from object`)
-
- return NewStorageClassesValueUnknown(), diags
- }
-
- maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
- }
-
- if diags.HasError() {
- return NewStorageClassesValueUnknown(), diags
- }
-
- return StorageClassesValue{
- Class: classVal,
- MaxIoPerSec: maxIoPerSecVal,
- MaxThroughInMb: maxThroughInMbVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageClassesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageClassesValue {
- object, diags := NewStorageClassesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageClassesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageClassesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageClassesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageClassesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageClassesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageClassesValueMust(StorageClassesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageClassesType) ValueType(ctx context.Context) attr.Value {
- return StorageClassesValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageClassesValue{}
-
-type StorageClassesValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
- MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
- state attr.ValueState
-}
-
-func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 3)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 3)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.MaxIoPerSec.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_io_per_sec"] = val
-
- val, err = v.MaxThroughInMb.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["max_through_in_mb"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageClassesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageClassesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageClassesValue) String() string {
- return "StorageClassesValue"
-}
-
-func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "max_io_per_sec": v.MaxIoPerSec,
- "max_through_in_mb": v.MaxThroughInMb,
- })
-
- return objVal, diags
-}
-
-func (v StorageClassesValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageClassesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.MaxIoPerSec.Equal(other.MaxIoPerSec) {
- return false
- }
-
- if !v.MaxThroughInMb.Equal(other.MaxThroughInMb) {
- return false
- }
-
- return true
-}
-
-func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
- return StorageClassesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "max_io_per_sec": basetypes.Int64Type{},
- "max_through_in_mb": basetypes.Int64Type{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go b/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
deleted file mode 100644
index d2fd7bc3..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/datasource.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen"
-)
-
-var _ datasource.DataSource = (*instanceDataSource)(nil)
-
-const errorPrefix = "[Sqlserverflexbeta - Instance]"
-
-func NewInstanceDataSource() datasource.DataSource {
- return &instanceDataSource{}
-}
-
-// dataSourceModel maps the data source schema data.
-type dataSourceModel struct {
- sqlserverflexbetaGen.InstanceModel
- TerraformID types.String `tfsdk:"id"`
-}
-
-type instanceDataSource struct {
- client *v3beta1api.APIClient
- providerData core.ProviderData
-}
-
-func (d *instanceDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
-}
-
-func (d *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.InstanceDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *instanceDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(d.providerData.RoundTripper),
- utils.UserAgentConfigOption(d.providerData.Version),
- }
- if d.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(d.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithRegion(d.providerData.GetRegion()),
- )
- }
- apiClient, err := v3beta1api.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- d.client = apiClient
- tflog.Info(ctx, fmt.Sprintf("%s client configured", errorPrefix))
-}
-
-func (d *instanceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data dataSourceModel
-
- // Read Terraform configuration data into the model
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := d.providerData.GetRegionWithOverride(data.Region)
- instanceId := data.InstanceId.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := d.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading instance",
- fmt.Sprintf("instance with ID %q does not exist in project %q.", instanceId, projectId),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- err = mapDataResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- fmt.Sprintf("%s Read", errorPrefix),
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
deleted file mode 100644
index f3226581..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instance_data_source_gen.go
+++ /dev/null
@@ -1,1579 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "backup_schedule": schema.StringAttribute{
- Computed: true,
- Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
- MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
- },
- "edition": schema.StringAttribute{
- Computed: true,
- Description: "Edition of the MSSQL server instance",
- MarkdownDescription: "Edition of the MSSQL server instance",
- },
- "encryption": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "kek_key_id": schema.StringAttribute{
- Computed: true,
- Description: "The key identifier",
- MarkdownDescription: "The key identifier",
- },
- "kek_key_ring_id": schema.StringAttribute{
- Computed: true,
- Description: "The keyring identifier",
- MarkdownDescription: "The keyring identifier",
- },
- "kek_key_version": schema.StringAttribute{
- Computed: true,
- Description: "The key version",
- MarkdownDescription: "The key version",
- },
- "service_account": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: EncryptionType{
- ObjectType: types.ObjectType{
- AttrTypes: EncryptionValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- Description: "this defines which key to use for storage encryption",
- MarkdownDescription: "this defines which key to use for storage encryption",
- },
- "flavor_id": schema.StringAttribute{
- Computed: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "tf_original_api_id": schema.StringAttribute{
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "instance_id": schema.StringAttribute{
- Required: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "is_deletable": schema.BoolAttribute{
- Computed: true,
- Description: "Whether the instance can be deleted or not.",
- MarkdownDescription: "Whether the instance can be deleted or not.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the instance.",
- MarkdownDescription: "The name of the instance.",
- },
- "network": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "access_scope": schema.StringAttribute{
- Computed: true,
- Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
- MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
- },
- "acl": schema.ListAttribute{
- ElementType: types.StringType,
- Computed: true,
- Description: "List of IPV4 cidr.",
- MarkdownDescription: "List of IPV4 cidr.",
- },
- "instance_address": schema.StringAttribute{
- Computed: true,
- },
- "router_address": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: NetworkType{
- ObjectType: types.ObjectType{
- AttrTypes: NetworkValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- Description: "The access configuration of the instance",
- MarkdownDescription: "The access configuration of the instance",
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "replicas": schema.Int64Attribute{
- Computed: true,
- Description: "How many replicas the instance should have.",
- MarkdownDescription: "How many replicas the instance should have.",
- },
- "retention_days": schema.Int64Attribute{
- Computed: true,
- Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
- MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
- },
- "status": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Computed: true,
- Description: "The storage class for the storage.",
- MarkdownDescription: "The storage class for the storage.",
- },
- "size": schema.Int64Attribute{
- Computed: true,
- Description: "The storage size in Gigabytes.",
- MarkdownDescription: "The storage size in Gigabytes.",
- },
- },
- CustomType: StorageType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- Description: "The object containing information about the storage size and class.",
- MarkdownDescription: "The object containing information about the storage size and class.",
- },
- "version": schema.StringAttribute{
- Computed: true,
- Description: "The sqlserver version used for the instance.",
- MarkdownDescription: "The sqlserver version used for the instance.",
- },
- },
- }
-}
-
-type InstanceModel struct {
- BackupSchedule types.String `tfsdk:"backup_schedule"`
- Edition types.String `tfsdk:"edition"`
- Encryption EncryptionValue `tfsdk:"encryption"`
- FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"tf_original_api_id"`
- InstanceId types.String `tfsdk:"instance_id"`
- IsDeletable types.Bool `tfsdk:"is_deletable"`
- Name types.String `tfsdk:"name"`
- Network NetworkValue `tfsdk:"network"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Replicas types.Int64 `tfsdk:"replicas"`
- RetentionDays types.Int64 `tfsdk:"retention_days"`
- Status types.String `tfsdk:"status"`
- Storage StorageValue `tfsdk:"storage"`
- Version types.String `tfsdk:"version"`
-}
-
-var _ basetypes.ObjectTypable = EncryptionType{}
-
-type EncryptionType struct {
- basetypes.ObjectType
-}
-
-func (t EncryptionType) Equal(o attr.Type) bool {
- other, ok := o.(EncryptionType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t EncryptionType) String() string {
- return "EncryptionType"
-}
-
-func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- kekKeyIdAttribute, ok := attributes["kek_key_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_id is missing from object`)
-
- return nil, diags
- }
-
- kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
- }
-
- kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_ring_id is missing from object`)
-
- return nil, diags
- }
-
- kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
- }
-
- kekKeyVersionAttribute, ok := attributes["kek_key_version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_version is missing from object`)
-
- return nil, diags
- }
-
- kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
- }
-
- serviceAccountAttribute, ok := attributes["service_account"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `service_account is missing from object`)
-
- return nil, diags
- }
-
- serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return EncryptionValue{
- KekKeyId: kekKeyIdVal,
- KekKeyRingId: kekKeyRingIdVal,
- KekKeyVersion: kekKeyVersionVal,
- ServiceAccount: serviceAccountVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewEncryptionValueNull() EncryptionValue {
- return EncryptionValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewEncryptionValueUnknown() EncryptionValue {
- return EncryptionValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing EncryptionValue Attribute Value",
- "While creating a EncryptionValue value, a missing attribute value was detected. "+
- "A EncryptionValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid EncryptionValue Attribute Type",
- "While creating a EncryptionValue value, an invalid attribute value was detected. "+
- "A EncryptionValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra EncryptionValue Attribute Value",
- "While creating a EncryptionValue value, an extra attribute value was detected. "+
- "A EncryptionValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyIdAttribute, ok := attributes["kek_key_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_id is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
- }
-
- kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_ring_id is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
- }
-
- kekKeyVersionAttribute, ok := attributes["kek_key_version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_version is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
- }
-
- serviceAccountAttribute, ok := attributes["service_account"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `service_account is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
- }
-
- if diags.HasError() {
- return NewEncryptionValueUnknown(), diags
- }
-
- return EncryptionValue{
- KekKeyId: kekKeyIdVal,
- KekKeyRingId: kekKeyRingIdVal,
- KekKeyVersion: kekKeyVersionVal,
- ServiceAccount: serviceAccountVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue {
- object, diags := NewEncryptionValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewEncryptionValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewEncryptionValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewEncryptionValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t EncryptionType) ValueType(ctx context.Context) attr.Value {
- return EncryptionValue{}
-}
-
-var _ basetypes.ObjectValuable = EncryptionValue{}
-
-type EncryptionValue struct {
- KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"`
- KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"`
- KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"`
- ServiceAccount basetypes.StringValue `tfsdk:"service_account"`
- state attr.ValueState
-}
-
-func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.KekKeyId.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_id"] = val
-
- val, err = v.KekKeyRingId.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_ring_id"] = val
-
- val, err = v.KekKeyVersion.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_version"] = val
-
- val, err = v.ServiceAccount.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["service_account"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v EncryptionValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v EncryptionValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v EncryptionValue) String() string {
- return "EncryptionValue"
-}
-
-func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "kek_key_id": basetypes.StringType{},
- "kek_key_ring_id": basetypes.StringType{},
- "kek_key_version": basetypes.StringType{},
- "service_account": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "kek_key_id": v.KekKeyId,
- "kek_key_ring_id": v.KekKeyRingId,
- "kek_key_version": v.KekKeyVersion,
- "service_account": v.ServiceAccount,
- })
-
- return objVal, diags
-}
-
-func (v EncryptionValue) Equal(o attr.Value) bool {
- other, ok := o.(EncryptionValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.KekKeyId.Equal(other.KekKeyId) {
- return false
- }
-
- if !v.KekKeyRingId.Equal(other.KekKeyRingId) {
- return false
- }
-
- if !v.KekKeyVersion.Equal(other.KekKeyVersion) {
- return false
- }
-
- if !v.ServiceAccount.Equal(other.ServiceAccount) {
- return false
- }
-
- return true
-}
-
-func (v EncryptionValue) Type(ctx context.Context) attr.Type {
- return EncryptionType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "kek_key_id": basetypes.StringType{},
- "kek_key_ring_id": basetypes.StringType{},
- "kek_key_version": basetypes.StringType{},
- "service_account": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = NetworkType{}
-
-type NetworkType struct {
- basetypes.ObjectType
-}
-
-func (t NetworkType) Equal(o attr.Type) bool {
- other, ok := o.(NetworkType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t NetworkType) String() string {
- return "NetworkType"
-}
-
-func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- accessScopeAttribute, ok := attributes["access_scope"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `access_scope is missing from object`)
-
- return nil, diags
- }
-
- accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
- }
-
- aclAttribute, ok := attributes["acl"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `acl is missing from object`)
-
- return nil, diags
- }
-
- aclVal, ok := aclAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
- }
-
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return nil, diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return nil, diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewNetworkValueNull() NetworkValue {
- return NetworkValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewNetworkValueUnknown() NetworkValue {
- return NetworkValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing NetworkValue Attribute Value",
- "While creating a NetworkValue value, a missing attribute value was detected. "+
- "A NetworkValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid NetworkValue Attribute Type",
- "While creating a NetworkValue value, an invalid attribute value was detected. "+
- "A NetworkValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra NetworkValue Attribute Value",
- "While creating a NetworkValue value, an extra attribute value was detected. "+
- "A NetworkValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewNetworkValueUnknown(), diags
- }
-
- accessScopeAttribute, ok := attributes["access_scope"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `access_scope is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
- }
-
- aclAttribute, ok := attributes["acl"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `acl is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- aclVal, ok := aclAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
- }
-
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
- if diags.HasError() {
- return NewNetworkValueUnknown(), diags
- }
-
- return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue {
- object, diags := NewNetworkValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewNetworkValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewNetworkValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewNetworkValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t NetworkType) ValueType(ctx context.Context) attr.Value {
- return NetworkValue{}
-}
-
-var _ basetypes.ObjectValuable = NetworkValue{}
-
-type NetworkValue struct {
- AccessScope basetypes.StringValue `tfsdk:"access_scope"`
- Acl basetypes.ListValue `tfsdk:"acl"`
- InstanceAddress basetypes.StringValue `tfsdk:"instance_address"`
- RouterAddress basetypes.StringValue `tfsdk:"router_address"`
- state attr.ValueState
-}
-
-func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["acl"] = basetypes.ListType{
- ElemType: types.StringType,
- }.TerraformType(ctx)
- attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.AccessScope.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["access_scope"] = val
-
- val, err = v.Acl.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["acl"] = val
-
- val, err = v.InstanceAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["instance_address"] = val
-
- val, err = v.RouterAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["router_address"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v NetworkValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v NetworkValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v NetworkValue) String() string {
- return "NetworkValue"
-}
-
-func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- var aclVal basetypes.ListValue
- switch {
- case v.Acl.IsUnknown():
- aclVal = types.ListUnknown(types.StringType)
- case v.Acl.IsNull():
- aclVal = types.ListNull(types.StringType)
- default:
- var d diag.Diagnostics
- aclVal, d = types.ListValue(types.StringType, v.Acl.Elements())
- diags.Append(d...)
- }
-
- if diags.HasError() {
- return types.ObjectUnknown(map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }), diags
- }
-
- attributeTypes := map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "access_scope": v.AccessScope,
- "acl": aclVal,
- "instance_address": v.InstanceAddress,
- "router_address": v.RouterAddress,
- })
-
- return objVal, diags
-}
-
-func (v NetworkValue) Equal(o attr.Value) bool {
- other, ok := o.(NetworkValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.AccessScope.Equal(other.AccessScope) {
- return false
- }
-
- if !v.Acl.Equal(other.Acl) {
- return false
- }
-
- if !v.InstanceAddress.Equal(other.InstanceAddress) {
- return false
- }
-
- if !v.RouterAddress.Equal(other.RouterAddress) {
- return false
- }
-
- return true
-}
-
-func (v NetworkValue) Type(ctx context.Context) attr.Type {
- return NetworkType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = StorageType{}
-
-type StorageType struct {
- basetypes.ObjectType
-}
-
-func (t StorageType) Equal(o attr.Type) bool {
- other, ok := o.(StorageType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageType) String() string {
- return "StorageType"
-}
-
-func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageValue{
- Class: classVal,
- Size: sizeVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageValueNull() StorageValue {
- return StorageValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageValueUnknown() StorageValue {
- return StorageValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageValue Attribute Value",
- "While creating a StorageValue value, a missing attribute value was detected. "+
- "A StorageValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageValue Attribute Type",
- "While creating a StorageValue value, an invalid attribute value was detected. "+
- "A StorageValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageValue Attribute Value",
- "While creating a StorageValue value, an extra attribute value was detected. "+
- "A StorageValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewStorageValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- if diags.HasError() {
- return NewStorageValueUnknown(), diags
- }
-
- return StorageValue{
- Class: classVal,
- Size: sizeVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue {
- object, diags := NewStorageValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageType) ValueType(ctx context.Context) attr.Value {
- return StorageValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageValue{}
-
-type StorageValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- Size basetypes.Int64Value `tfsdk:"size"`
- state attr.ValueState
-}
-
-func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 2)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 2)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageValue) String() string {
- return "StorageValue"
-}
-
-func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "size": v.Size,
- })
-
- return objVal, diags
-}
-
-func (v StorageValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- return true
-}
-
-func (v StorageValue) Type(ctx context.Context) attr.Type {
- return StorageType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go
deleted file mode 100644
index 04fff1f6..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen/instances_data_source_gen.go
+++ /dev/null
@@ -1,1172 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func InstancesDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "instances": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "is_deletable": schema.BoolAttribute{
- Computed: true,
- Description: "Whether the instance can be deleted or not.",
- MarkdownDescription: "Whether the instance can be deleted or not.",
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: "The name of the instance.",
- MarkdownDescription: "The name of the instance.",
- },
- "status": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: InstancesType{
- ObjectType: types.ObjectType{
- AttrTypes: InstancesValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of owned instances and their current status.",
- MarkdownDescription: "List of owned instances and their current status.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the items to be returned on each page.",
- MarkdownDescription: "Sorting of the items to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "index.desc",
- "index.asc",
- "id.desc",
- "id.asc",
- "is_deletable.desc",
- "is_deletable.asc",
- "name.asc",
- "name.desc",
- "status.asc",
- "status.desc",
- ),
- },
- },
- },
- }
-}
-
-type InstancesModel struct {
- Instances types.List `tfsdk:"instances"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
-}
-
-var _ basetypes.ObjectTypable = InstancesType{}
-
-type InstancesType struct {
- basetypes.ObjectType
-}
-
-func (t InstancesType) Equal(o attr.Type) bool {
- other, ok := o.(InstancesType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t InstancesType) String() string {
- return "InstancesType"
-}
-
-func (t InstancesType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- isDeletableAttribute, ok := attributes["is_deletable"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `is_deletable is missing from object`)
-
- return nil, diags
- }
-
- isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute))
- }
-
- nameAttribute, ok := attributes["name"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `name is missing from object`)
-
- return nil, diags
- }
-
- nameVal, ok := nameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
- }
-
- statusAttribute, ok := attributes["status"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `status is missing from object`)
-
- return nil, diags
- }
-
- statusVal, ok := statusAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return InstancesValue{
- Id: idVal,
- IsDeletable: isDeletableVal,
- Name: nameVal,
- Status: statusVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewInstancesValueNull() InstancesValue {
- return InstancesValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewInstancesValueUnknown() InstancesValue {
- return InstancesValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewInstancesValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (InstancesValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing InstancesValue Attribute Value",
- "While creating a InstancesValue value, a missing attribute value was detected. "+
- "A InstancesValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid InstancesValue Attribute Type",
- "While creating a InstancesValue value, an invalid attribute value was detected. "+
- "A InstancesValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("InstancesValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("InstancesValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra InstancesValue Attribute Value",
- "While creating a InstancesValue value, an extra attribute value was detected. "+
- "A InstancesValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra InstancesValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewInstancesValueUnknown(), diags
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.StringValue, was: %T`, idAttribute))
- }
-
- isDeletableAttribute, ok := attributes["is_deletable"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `is_deletable is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- isDeletableVal, ok := isDeletableAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`is_deletable expected to be basetypes.BoolValue, was: %T`, isDeletableAttribute))
- }
-
- nameAttribute, ok := attributes["name"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `name is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- nameVal, ok := nameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`name expected to be basetypes.StringValue, was: %T`, nameAttribute))
- }
-
- statusAttribute, ok := attributes["status"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `status is missing from object`)
-
- return NewInstancesValueUnknown(), diags
- }
-
- statusVal, ok := statusAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
- }
-
- if diags.HasError() {
- return NewInstancesValueUnknown(), diags
- }
-
- return InstancesValue{
- Id: idVal,
- IsDeletable: isDeletableVal,
- Name: nameVal,
- Status: statusVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewInstancesValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) InstancesValue {
- object, diags := NewInstancesValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewInstancesValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t InstancesType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewInstancesValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewInstancesValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewInstancesValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewInstancesValueMust(InstancesValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t InstancesType) ValueType(ctx context.Context) attr.Value {
- return InstancesValue{}
-}
-
-var _ basetypes.ObjectValuable = InstancesValue{}
-
-type InstancesValue struct {
- Id basetypes.StringValue `tfsdk:"id"`
- IsDeletable basetypes.BoolValue `tfsdk:"is_deletable"`
- Name basetypes.StringValue `tfsdk:"name"`
- Status basetypes.StringValue `tfsdk:"status"`
- state attr.ValueState
-}
-
-func (v InstancesValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["is_deletable"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["name"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.IsDeletable.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["is_deletable"] = val
-
- val, err = v.Name.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["name"] = val
-
- val, err = v.Status.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["status"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v InstancesValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v InstancesValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v InstancesValue) String() string {
- return "InstancesValue"
-}
-
-func (v InstancesValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "id": basetypes.StringType{},
- "is_deletable": basetypes.BoolType{},
- "name": basetypes.StringType{},
- "status": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "id": v.Id,
- "is_deletable": v.IsDeletable,
- "name": v.Name,
- "status": v.Status,
- })
-
- return objVal, diags
-}
-
-func (v InstancesValue) Equal(o attr.Value) bool {
- other, ok := o.(InstancesValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.IsDeletable.Equal(other.IsDeletable) {
- return false
- }
-
- if !v.Name.Equal(other.Name) {
- return false
- }
-
- if !v.Status.Equal(other.Status) {
- return false
- }
-
- return true
-}
-
-func (v InstancesValue) Type(ctx context.Context) attr.Type {
- return InstancesType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v InstancesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "id": basetypes.StringType{},
- "is_deletable": basetypes.BoolType{},
- "name": basetypes.StringType{},
- "status": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions.go b/stackit/internal/services/sqlserverflexbeta/instance/functions.go
deleted file mode 100644
index 18ad8dc0..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/functions.go
+++ /dev/null
@@ -1,274 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "errors"
- "fmt"
- "math"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaDataGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/datasources_gen"
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
-)
-
-func mapResponseToModel(
- ctx context.Context,
- resp *v3beta1api.GetInstanceResponse,
- m *sqlserverflexbetaResGen.InstanceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleEncryption(ctx, m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexbetaResGen.NewNetworkValue(
- sqlserverflexbetaResGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexbetaResGen.NewStorageValue(
- sqlserverflexbetaResGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func mapDataResponseToModel(
- ctx context.Context,
- resp *v3beta1api.GetInstanceResponse,
- m *dataSourceModel,
- tfDiags diag.Diagnostics,
-) error {
- m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
- m.Edition = types.StringValue(string(resp.GetEdition()))
- m.Encryption = handleDSEncryption(ctx, m, resp)
- m.FlavorId = types.StringValue(resp.GetFlavorId())
- m.Id = types.StringValue(resp.GetId())
- m.InstanceId = types.StringValue(resp.GetId())
- m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
- m.Name = types.StringValue(resp.GetName())
- netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf(
- "error converting network acl response value",
- )
- }
- net, diags := sqlserverflexbetaDataGen.NewNetworkValue(
- sqlserverflexbetaDataGen.NetworkValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
- "acl": netAcl,
- "instance_address": types.StringValue(resp.Network.GetInstanceAddress()),
- "router_address": types.StringValue(resp.Network.GetRouterAddress()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return errors.New("error converting network response value")
- }
- m.Network = net
- m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
- m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
- m.Status = types.StringValue(string(resp.GetStatus()))
-
- stor, diags := sqlserverflexbetaDataGen.NewStorageValue(
- sqlserverflexbetaDataGen.StorageValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "class": types.StringValue(resp.Storage.GetClass()),
- "size": types.Int64Value(resp.Storage.GetSize()),
- },
- )
- tfDiags.Append(diags...)
- if diags.HasError() {
- return fmt.Errorf("error converting storage response value")
- }
- m.Storage = stor
-
- m.Version = types.StringValue(string(resp.GetVersion()))
- return nil
-}
-
-func handleEncryption(
- ctx context.Context,
- m *sqlserverflexbetaResGen.InstanceModel,
- resp *v3beta1api.GetInstanceResponse,
-) sqlserverflexbetaResGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == "" ||
- resp.Encryption.KekKeyRingId == "" ||
- resp.Encryption.KekKeyVersion == "" ||
- resp.Encryption.ServiceAccount == "" {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexbetaResGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := sqlserverflexbetaResGen.NewEncryptionValueMust(
- sqlserverflexbetaResGen.EncryptionValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "kek_key_id": types.StringValue(resp.Encryption.GetKekKeyId()),
- "kek_key_ring_id": types.StringValue(resp.Encryption.GetKekKeyRingId()),
- "kek_key_version": types.StringValue(resp.Encryption.GetKekKeyVersion()),
- "service_account": types.StringValue(resp.Encryption.GetServiceAccount()),
- },
- )
- return enc
-}
-
-func handleDSEncryption(
- ctx context.Context,
- m *dataSourceModel,
- resp *v3beta1api.GetInstanceResponse,
-) sqlserverflexbetaDataGen.EncryptionValue {
- if !resp.HasEncryption() ||
- resp.Encryption == nil ||
- resp.Encryption.KekKeyId == "" ||
- resp.Encryption.KekKeyRingId == "" ||
- resp.Encryption.KekKeyVersion == "" ||
- resp.Encryption.ServiceAccount == "" {
- if m.Encryption.IsNull() || m.Encryption.IsUnknown() {
- return sqlserverflexbetaDataGen.NewEncryptionValueNull()
- }
- return m.Encryption
- }
-
- enc := sqlserverflexbetaDataGen.NewEncryptionValueMust(
- sqlserverflexbetaDataGen.EncryptionValue{}.AttributeTypes(ctx),
- map[string]attr.Value{
- "kek_key_id": types.StringValue(resp.Encryption.GetKekKeyId()),
- "kek_key_ring_id": types.StringValue(resp.Encryption.GetKekKeyRingId()),
- "kek_key_version": types.StringValue(resp.Encryption.GetKekKeyVersion()),
- "service_account": types.StringValue(resp.Encryption.GetServiceAccount()),
- },
- )
- return enc
-}
-
-func toCreatePayload(
- ctx context.Context,
- model *sqlserverflexbetaResGen.InstanceModel,
-) (*v3beta1api.CreateInstanceRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- storagePayload := v3beta1api.StorageCreate{}
- if !model.Storage.IsNull() && !model.Storage.IsUnknown() {
- storagePayload.Class = model.Storage.Class.ValueString()
- storagePayload.Size = model.Storage.Size.ValueInt64()
- }
-
- var encryptionPayload *v3beta1api.InstanceEncryption = nil
- if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
- encryptionPayload = &v3beta1api.InstanceEncryption{}
- encryptionPayload.KekKeyId = model.Encryption.KekKeyId.ValueString()
- encryptionPayload.KekKeyRingId = model.Encryption.KekKeyRingId.ValueString()
- encryptionPayload.KekKeyVersion = model.Encryption.KekKeyVersion.ValueString()
- encryptionPayload.ServiceAccount = model.Encryption.ServiceAccount.ValueString()
- }
-
- networkPayload := v3beta1api.CreateInstanceRequestPayloadNetwork{}
- if !model.Network.IsNull() && !model.Network.IsUnknown() {
- accScope := v3beta1api.InstanceNetworkAccessScope(
- model.Network.AccessScope.ValueString(),
- )
- networkPayload.AccessScope = &accScope
-
- var resList []string
- diags := model.Network.Acl.ElementsAs(ctx, &resList, false)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting network acl list")
- }
- networkPayload.Acl = resList
- }
-
- return &v3beta1api.CreateInstanceRequestPayload{
- BackupSchedule: model.BackupSchedule.ValueString(),
- Encryption: encryptionPayload,
- FlavorId: model.FlavorId.ValueString(),
- Name: model.Name.ValueString(),
- Network: networkPayload,
- RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // TODO
- Storage: storagePayload,
- Version: v3beta1api.InstanceVersion(model.Version.ValueString()),
- }, nil
-}
-
-func toUpdatePayload(
- ctx context.Context,
- m *sqlserverflexbetaResGen.InstanceModel,
- resp *resource.UpdateResponse,
-) (*v3beta1api.UpdateInstanceRequestPayload, error) {
- if m == nil {
- return nil, fmt.Errorf("nil model")
- }
- if m.Replicas.ValueInt64() > math.MaxUint32 {
- return nil, fmt.Errorf("replicas value is too big for uint32")
- }
- replVal := v3beta1api.Replicas(uint32(m.Replicas.ValueInt64())) // nolint:gosec // check is performed above
-
- var netACL []string
- diags := m.Network.Acl.ElementsAs(ctx, &netACL, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return nil, fmt.Errorf("error converting model network acl value")
- }
- if m.RetentionDays.ValueInt64() > math.MaxInt32 {
- return nil, fmt.Errorf("value is too large for int32")
- }
- return &v3beta1api.UpdateInstanceRequestPayload{
- BackupSchedule: m.BackupSchedule.ValueString(),
- FlavorId: m.FlavorId.ValueString(),
- Name: m.Name.ValueString(),
- Network: v3beta1api.UpdateInstanceRequestPayloadNetwork{
- Acl: netACL,
- },
- Replicas: replVal,
- RetentionDays: int32(m.RetentionDays.ValueInt64()), //nolint:gosec // checked above
- Storage: v3beta1api.StorageUpdate{Size: m.Storage.Size.ValueInt64Pointer()},
- Version: v3beta1api.InstanceVersion(m.Version.ValueString()),
- }, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go b/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go
deleted file mode 100644
index 03380d5d..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/functions_test.go
+++ /dev/null
@@ -1,278 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "reflect"
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- sqlserverflexbetaPkgGen "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
-)
-
-func Test_handleDSEncryption(t *testing.T) {
- type args struct {
- m *dataSourceModel
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- want sqlserverflexbetaRs.EncryptionValue
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- if got := handleDSEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want)
- }
- },
- )
- }
-}
-
-func Test_handleEncryption(t *testing.T) {
- type args struct {
- m *sqlserverflexbetaRs.InstanceModel
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- }
- tests := []struct {
- name string
- args args
- want sqlserverflexbetaRs.EncryptionValue
- }{
- {
- name: "nil response",
- args: args{
- m: &sqlserverflexbetaRs.InstanceModel{},
- resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{},
- },
- want: sqlserverflexbetaRs.EncryptionValue{},
- },
- {
- name: "nil response",
- args: args{
- m: &sqlserverflexbetaRs.InstanceModel{},
- resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{
- Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{},
- },
- },
- want: sqlserverflexbetaRs.NewEncryptionValueNull(),
- },
- {
- name: "response with values",
- args: args{
- m: &sqlserverflexbetaRs.InstanceModel{},
- resp: &sqlserverflexbetaPkgGen.GetInstanceResponse{
- Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{
- KekKeyId: ("kek_key_id"),
- KekKeyRingId: ("kek_key_ring_id"),
- KekKeyVersion: ("kek_key_version"),
- ServiceAccount: ("kek_svc_acc"),
- },
- },
- },
- want: sqlserverflexbetaRs.NewEncryptionValueMust(
- sqlserverflexbetaRs.EncryptionValue{}.AttributeTypes(context.TODO()),
- map[string]attr.Value{
- "kek_key_id": types.StringValue("kek_key_id"),
- "kek_key_ring_id": types.StringValue("kek_key_ring_id"),
- "kek_key_version": types.StringValue("kek_key_version"),
- "service_account": types.StringValue("kek_svc_acc"),
- },
- ),
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- got := handleEncryption(t.Context(), tt.args.m, tt.args.resp)
-
- diff := cmp.Diff(tt.want, got)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- },
- )
- }
-}
-
-func Test_mapDataResponseToModel(t *testing.T) {
- type args struct {
- ctx context.Context
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- m *dataSourceModel
- tfDiags diag.Diagnostics
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- if err := mapDataResponseToModel(
- tt.args.ctx,
- tt.args.resp,
- tt.args.m,
- tt.args.tfDiags,
- ); (err != nil) != tt.wantErr {
- t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
-
-func Test_mapResponseToModel(t *testing.T) {
- type args struct {
- ctx context.Context
- resp *sqlserverflexbetaPkgGen.GetInstanceResponse
- m *sqlserverflexbetaRs.InstanceModel
- tfDiags diag.Diagnostics
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- if err := mapResponseToModel(
- tt.args.ctx,
- tt.args.resp,
- tt.args.m,
- tt.args.tfDiags,
- ); (err != nil) != tt.wantErr {
- t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
-
-func Test_toCreatePayload(t *testing.T) {
- type args struct {
- ctx context.Context
- model *sqlserverflexbetaRs.InstanceModel
- }
- tests := []struct {
- name string
- args args
- want *sqlserverflexbetaPkgGen.CreateInstanceRequestPayload
- wantErr bool
- }{
- {
- name: "simple",
- args: args{
- ctx: context.Background(),
- model: &sqlserverflexbetaRs.InstanceModel{
- Encryption: sqlserverflexbetaRs.NewEncryptionValueMust(
- sqlserverflexbetaRs.EncryptionValue{}.AttributeTypes(context.Background()),
- map[string]attr.Value{
- "kek_key_id": types.StringValue("kek_key_id"),
- "kek_key_ring_id": types.StringValue("kek_key_ring_id"),
- "kek_key_version": types.StringValue("kek_key_version"),
- "service_account": types.StringValue("sacc"),
- },
- ),
- Storage: sqlserverflexbetaRs.StorageValue{},
- },
- },
- want: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayload{
- BackupSchedule: "",
- Encryption: &sqlserverflexbetaPkgGen.InstanceEncryption{
- KekKeyId: ("kek_key_id"),
- KekKeyRingId: ("kek_key_ring_id"),
- KekKeyVersion: ("kek_key_version"),
- ServiceAccount: ("sacc"),
- },
- FlavorId: "",
- Name: "",
- Network: sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadNetwork{},
- RetentionDays: 0,
- Storage: sqlserverflexbetaPkgGen.StorageCreate{},
- Version: "",
- },
- wantErr: false,
- },
- {
- name: "nil object",
- args: args{
- ctx: context.Background(),
- model: &sqlserverflexbetaRs.InstanceModel{
- Encryption: sqlserverflexbetaRs.NewEncryptionValueNull(),
- Storage: sqlserverflexbetaRs.StorageValue{},
- },
- },
- want: &sqlserverflexbetaPkgGen.CreateInstanceRequestPayload{
- BackupSchedule: "",
- Encryption: nil,
- FlavorId: "",
- Name: "",
- Network: sqlserverflexbetaPkgGen.CreateInstanceRequestPayloadNetwork{},
- RetentionDays: 0,
- Storage: sqlserverflexbetaPkgGen.StorageCreate{},
- Version: "",
- },
- wantErr: false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- got, err := toCreatePayload(tt.args.ctx, tt.args.model)
- if (err != nil) != tt.wantErr {
- t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if diff := cmp.Diff(tt.want, got); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- },
- )
- }
-}
-
-func Test_toUpdatePayload(t *testing.T) {
- type args struct {
- ctx context.Context
- m *sqlserverflexbetaRs.InstanceModel
- resp *resource.UpdateResponse
- }
- tests := []struct {
- name string
- args args
- want *sqlserverflexbetaPkgGen.UpdateInstanceRequestPayload
- wantErr bool
- }{
- // TODO: Add test cases.
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp)
- if (err != nil) != tt.wantErr {
- t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if !reflect.DeepEqual(got, tt.want) {
- t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml
deleted file mode 100644
index 71d4cbe4..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/planModifiers.yaml
+++ /dev/null
@@ -1,124 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'name'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'backup_schedule'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'encryption.kek_key_id'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.kek_key_version'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.kek_key_ring_id'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'encryption.service_account'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'network.access_scope'
- validators:
- - validate.NoSeparator
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'network.acl'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'network.instance_address'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'network.router_address'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'region'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'retention_days'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'edition'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'version'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'replicas'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'storage'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'storage.class'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'storage.size'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'flavor_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'is_deletable'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resource.go b/stackit/internal/services/sqlserverflexbeta/instance/resource.go
deleted file mode 100644
index a824aabf..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/resource.go
+++ /dev/null
@@ -1,546 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- _ "embed"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- wait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
-)
-
-var (
- _ resource.Resource = &instanceResource{}
- _ resource.ResourceWithConfigure = &instanceResource{}
- _ resource.ResourceWithImportState = &instanceResource{}
- _ resource.ResourceWithModifyPlan = &instanceResource{}
- _ resource.ResourceWithIdentity = &instanceResource{}
-)
-
-func NewInstanceResource() resource.Resource {
- return &instanceResource{}
-}
-
-type instanceResource struct {
- client *v3beta1api.APIClient
- providerData core.ProviderData
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexbetaResGen.InstanceModel
-
-type InstanceResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
-}
-
-func (r *instanceResource) Metadata(
- _ context.Context,
- req resource.MetadataRequest,
- resp *resource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_instance"
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-func (r *instanceResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- resp *resource.IdentitySchemaResponse,
-) {
- resp.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *instanceResource) Configure(
- ctx context.Context,
- req resource.ConfigureRequest,
- resp *resource.ConfigureResponse,
-) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(r.providerData.RoundTripper),
- utils.UserAgentConfigOption(r.providerData.Version),
- }
- if r.providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(r.providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(r.providerData.GetRegion()))
- }
- apiClient, err := v3beta1api.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- resp.Diagnostics.AddError(
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "sqlserverflexbeta.Instance client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *instanceResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- var configModel resourceModel
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- if req.Plan.Raw.IsNull() {
- return
- }
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-func (r *instanceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data resourceModel
- crateErr := "[SQL Server Flex BETA - Create] error"
-
- // Read Terraform plan data into the model
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // Generate API request body from model
- payload, err := toCreatePayload(ctx, &data)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
-
- // Create new Instance
- createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
- ctx,
- projectId,
- region,
- ).CreateInstanceRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, crateErr, fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- instanceId := createResp.Id
- data.InstanceId = types.StringValue(instanceId)
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- waitResp, err := wait.CreateInstanceWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectId,
- instanceId,
- region,
- ).SetSleepBeforeWait(
- 10 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == "" {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- crateErr,
- fmt.Sprintf("processing API payload: %v", err),
- )
- return
- }
-
- // Save data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance created")
-}
-
-func (r *instanceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if ok && oapiErr.StatusCode == http.StatusNotFound {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading instance", err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapResponseToModel(ctx, instanceResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading instance",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Save identity into Terraform state
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance read")
-}
-
-func (r *instanceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data resourceModel
- updateInstanceError := "Error updating instance"
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := data.ProjectId.ValueString()
- region := data.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := data.InstanceId.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- // Generate API request body from model
- payload, err := toUpdatePayload(ctx, &data, resp)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Creating API payload: %v", err),
- )
- return
- }
- // Update existing instance
- err = r.client.DefaultAPI.UpdateInstanceRequest(
- ctx,
- projectId,
- region,
- instanceId,
- ).UpdateInstanceRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, updateInstanceError, err.Error())
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- waitResp, err := wait.
- UpdateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region).
- SetSleepBeforeWait(15 * time.Second).
- SetTimeout(45 * time.Minute).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Instance update waiting: %v", err),
- )
- return
- }
-
- // Map response body to schema
- err = mapResponseToModel(ctx, waitResp, &data, resp.Diagnostics)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- updateInstanceError,
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- identity := InstanceResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance updated")
-}
-
-func (r *instanceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data resourceModel
-
- // Read Terraform prior state data into the model
- resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Read identity data
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "region", region)
-
- instanceId := identityData.InstanceID.ValueString()
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
-
- // Delete existing instance
- err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- delResp, err := wait.DeleteInstanceWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region).WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- fmt.Sprintf("Instance deletion waiting: %v", err),
- )
- return
- }
-
- if delResp != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error deleting instance",
- "wait handler returned non nil result",
- )
- return
- }
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "sqlserverflexbeta.Instance deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *instanceResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing instance",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
- req.ID,
- ),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData InstanceResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
-
- tflog.Info(ctx, "Sqlserverflexbeta instance state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go
deleted file mode 100644
index f8865ae5..00000000
--- a/stackit/internal/services/sqlserverflexbeta/instance/resources_gen/instance_resource_gen.go
+++ /dev/null
@@ -1,1597 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
-)
-
-func InstanceResourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "backup_schedule": schema.StringAttribute{
- Required: true,
- Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
- MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
- },
- "edition": schema.StringAttribute{
- Computed: true,
- Description: "Edition of the MSSQL server instance",
- MarkdownDescription: "Edition of the MSSQL server instance",
- },
- "encryption": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "kek_key_id": schema.StringAttribute{
- Required: true,
- Description: "The key identifier",
- MarkdownDescription: "The key identifier",
- },
- "kek_key_ring_id": schema.StringAttribute{
- Required: true,
- Description: "The keyring identifier",
- MarkdownDescription: "The keyring identifier",
- },
- "kek_key_version": schema.StringAttribute{
- Required: true,
- Description: "The key version",
- MarkdownDescription: "The key version",
- },
- "service_account": schema.StringAttribute{
- Required: true,
- },
- },
- CustomType: EncryptionType{
- ObjectType: types.ObjectType{
- AttrTypes: EncryptionValue{}.AttributeTypes(ctx),
- },
- },
- Optional: true,
- Computed: true,
- Description: "this defines which key to use for storage encryption",
- MarkdownDescription: "this defines which key to use for storage encryption",
- },
- "flavor_id": schema.StringAttribute{
- Required: true,
- Description: "The id of the instance flavor.",
- MarkdownDescription: "The id of the instance flavor.",
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "instance_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "is_deletable": schema.BoolAttribute{
- Computed: true,
- Description: "Whether the instance can be deleted or not.",
- MarkdownDescription: "Whether the instance can be deleted or not.",
- },
- "name": schema.StringAttribute{
- Required: true,
- Description: "The name of the instance.",
- MarkdownDescription: "The name of the instance.",
- },
- "network": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "access_scope": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
- MarkdownDescription: "The network access scope of the instance\n\n⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.\n",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PUBLIC",
- "SNA",
- ),
- },
- Default: stringdefault.StaticString("PUBLIC"),
- },
- "acl": schema.ListAttribute{
- ElementType: types.StringType,
- Required: true,
- Description: "List of IPV4 cidr.",
- MarkdownDescription: "List of IPV4 cidr.",
- },
- "instance_address": schema.StringAttribute{
- Computed: true,
- },
- "router_address": schema.StringAttribute{
- Computed: true,
- },
- },
- CustomType: NetworkType{
- ObjectType: types.ObjectType{
- AttrTypes: NetworkValue{}.AttributeTypes(ctx),
- },
- },
- Required: true,
- Description: "the network configuration of the instance.",
- MarkdownDescription: "the network configuration of the instance.",
- },
- "project_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "replicas": schema.Int64Attribute{
- Computed: true,
- Description: "How many replicas the instance should have.",
- MarkdownDescription: "How many replicas the instance should have.",
- },
- "retention_days": schema.Int64Attribute{
- Required: true,
- Description: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
- MarkdownDescription: "The days for how long the backup files should be stored before cleaned up. 30 to 365",
- },
- "status": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "class": schema.StringAttribute{
- Required: true,
- Description: "The storage class for the storage.",
- MarkdownDescription: "The storage class for the storage.",
- },
- "size": schema.Int64Attribute{
- Required: true,
- Description: "The storage size in Gigabytes.",
- MarkdownDescription: "The storage size in Gigabytes.",
- },
- },
- CustomType: StorageType{
- ObjectType: types.ObjectType{
- AttrTypes: StorageValue{}.AttributeTypes(ctx),
- },
- },
- Required: true,
- Description: "The object containing information about the storage size and class.",
- MarkdownDescription: "The object containing information about the storage size and class.",
- },
- "version": schema.StringAttribute{
- Required: true,
- Description: "The sqlserver version used for the instance.",
- MarkdownDescription: "The sqlserver version used for the instance.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "2022",
- ),
- },
- },
- },
- }
-}
-
-type InstanceModel struct {
- BackupSchedule types.String `tfsdk:"backup_schedule"`
- Edition types.String `tfsdk:"edition"`
- Encryption EncryptionValue `tfsdk:"encryption"`
- FlavorId types.String `tfsdk:"flavor_id"`
- Id types.String `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- IsDeletable types.Bool `tfsdk:"is_deletable"`
- Name types.String `tfsdk:"name"`
- Network NetworkValue `tfsdk:"network"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Replicas types.Int64 `tfsdk:"replicas"`
- RetentionDays types.Int64 `tfsdk:"retention_days"`
- Status types.String `tfsdk:"status"`
- Storage StorageValue `tfsdk:"storage"`
- Version types.String `tfsdk:"version"`
-}
-
-var _ basetypes.ObjectTypable = EncryptionType{}
-
-type EncryptionType struct {
- basetypes.ObjectType
-}
-
-func (t EncryptionType) Equal(o attr.Type) bool {
- other, ok := o.(EncryptionType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t EncryptionType) String() string {
- return "EncryptionType"
-}
-
-func (t EncryptionType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- kekKeyIdAttribute, ok := attributes["kek_key_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_id is missing from object`)
-
- return nil, diags
- }
-
- kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
- }
-
- kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_ring_id is missing from object`)
-
- return nil, diags
- }
-
- kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
- }
-
- kekKeyVersionAttribute, ok := attributes["kek_key_version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_version is missing from object`)
-
- return nil, diags
- }
-
- kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
- }
-
- serviceAccountAttribute, ok := attributes["service_account"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `service_account is missing from object`)
-
- return nil, diags
- }
-
- serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return EncryptionValue{
- KekKeyId: kekKeyIdVal,
- KekKeyRingId: kekKeyRingIdVal,
- KekKeyVersion: kekKeyVersionVal,
- ServiceAccount: serviceAccountVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewEncryptionValueNull() EncryptionValue {
- return EncryptionValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewEncryptionValueUnknown() EncryptionValue {
- return EncryptionValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewEncryptionValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (EncryptionValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing EncryptionValue Attribute Value",
- "While creating a EncryptionValue value, a missing attribute value was detected. "+
- "A EncryptionValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid EncryptionValue Attribute Type",
- "While creating a EncryptionValue value, an invalid attribute value was detected. "+
- "A EncryptionValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("EncryptionValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra EncryptionValue Attribute Value",
- "While creating a EncryptionValue value, an extra attribute value was detected. "+
- "A EncryptionValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra EncryptionValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyIdAttribute, ok := attributes["kek_key_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_id is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyIdVal, ok := kekKeyIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_id expected to be basetypes.StringValue, was: %T`, kekKeyIdAttribute))
- }
-
- kekKeyRingIdAttribute, ok := attributes["kek_key_ring_id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_ring_id is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyRingIdVal, ok := kekKeyRingIdAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_ring_id expected to be basetypes.StringValue, was: %T`, kekKeyRingIdAttribute))
- }
-
- kekKeyVersionAttribute, ok := attributes["kek_key_version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `kek_key_version is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- kekKeyVersionVal, ok := kekKeyVersionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`kek_key_version expected to be basetypes.StringValue, was: %T`, kekKeyVersionAttribute))
- }
-
- serviceAccountAttribute, ok := attributes["service_account"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `service_account is missing from object`)
-
- return NewEncryptionValueUnknown(), diags
- }
-
- serviceAccountVal, ok := serviceAccountAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`service_account expected to be basetypes.StringValue, was: %T`, serviceAccountAttribute))
- }
-
- if diags.HasError() {
- return NewEncryptionValueUnknown(), diags
- }
-
- return EncryptionValue{
- KekKeyId: kekKeyIdVal,
- KekKeyRingId: kekKeyRingIdVal,
- KekKeyVersion: kekKeyVersionVal,
- ServiceAccount: serviceAccountVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewEncryptionValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) EncryptionValue {
- object, diags := NewEncryptionValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewEncryptionValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t EncryptionType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewEncryptionValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewEncryptionValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewEncryptionValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewEncryptionValueMust(EncryptionValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t EncryptionType) ValueType(ctx context.Context) attr.Value {
- return EncryptionValue{}
-}
-
-var _ basetypes.ObjectValuable = EncryptionValue{}
-
-type EncryptionValue struct {
- KekKeyId basetypes.StringValue `tfsdk:"kek_key_id"`
- KekKeyRingId basetypes.StringValue `tfsdk:"kek_key_ring_id"`
- KekKeyVersion basetypes.StringValue `tfsdk:"kek_key_version"`
- ServiceAccount basetypes.StringValue `tfsdk:"service_account"`
- state attr.ValueState
-}
-
-func (v EncryptionValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["kek_key_id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["kek_key_ring_id"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["kek_key_version"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["service_account"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.KekKeyId.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_id"] = val
-
- val, err = v.KekKeyRingId.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_ring_id"] = val
-
- val, err = v.KekKeyVersion.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["kek_key_version"] = val
-
- val, err = v.ServiceAccount.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["service_account"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v EncryptionValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v EncryptionValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v EncryptionValue) String() string {
- return "EncryptionValue"
-}
-
-func (v EncryptionValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "kek_key_id": basetypes.StringType{},
- "kek_key_ring_id": basetypes.StringType{},
- "kek_key_version": basetypes.StringType{},
- "service_account": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "kek_key_id": v.KekKeyId,
- "kek_key_ring_id": v.KekKeyRingId,
- "kek_key_version": v.KekKeyVersion,
- "service_account": v.ServiceAccount,
- })
-
- return objVal, diags
-}
-
-func (v EncryptionValue) Equal(o attr.Value) bool {
- other, ok := o.(EncryptionValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.KekKeyId.Equal(other.KekKeyId) {
- return false
- }
-
- if !v.KekKeyRingId.Equal(other.KekKeyRingId) {
- return false
- }
-
- if !v.KekKeyVersion.Equal(other.KekKeyVersion) {
- return false
- }
-
- if !v.ServiceAccount.Equal(other.ServiceAccount) {
- return false
- }
-
- return true
-}
-
-func (v EncryptionValue) Type(ctx context.Context) attr.Type {
- return EncryptionType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v EncryptionValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "kek_key_id": basetypes.StringType{},
- "kek_key_ring_id": basetypes.StringType{},
- "kek_key_version": basetypes.StringType{},
- "service_account": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = NetworkType{}
-
-type NetworkType struct {
- basetypes.ObjectType
-}
-
-func (t NetworkType) Equal(o attr.Type) bool {
- other, ok := o.(NetworkType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t NetworkType) String() string {
- return "NetworkType"
-}
-
-func (t NetworkType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- accessScopeAttribute, ok := attributes["access_scope"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `access_scope is missing from object`)
-
- return nil, diags
- }
-
- accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
- }
-
- aclAttribute, ok := attributes["acl"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `acl is missing from object`)
-
- return nil, diags
- }
-
- aclVal, ok := aclAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
- }
-
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return nil, diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return nil, diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewNetworkValueNull() NetworkValue {
- return NetworkValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewNetworkValueUnknown() NetworkValue {
- return NetworkValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewNetworkValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (NetworkValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing NetworkValue Attribute Value",
- "While creating a NetworkValue value, a missing attribute value was detected. "+
- "A NetworkValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid NetworkValue Attribute Type",
- "While creating a NetworkValue value, an invalid attribute value was detected. "+
- "A NetworkValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("NetworkValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra NetworkValue Attribute Value",
- "While creating a NetworkValue value, an extra attribute value was detected. "+
- "A NetworkValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra NetworkValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewNetworkValueUnknown(), diags
- }
-
- accessScopeAttribute, ok := attributes["access_scope"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `access_scope is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- accessScopeVal, ok := accessScopeAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`access_scope expected to be basetypes.StringValue, was: %T`, accessScopeAttribute))
- }
-
- aclAttribute, ok := attributes["acl"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `acl is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- aclVal, ok := aclAttribute.(basetypes.ListValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`acl expected to be basetypes.ListValue, was: %T`, aclAttribute))
- }
-
- instanceAddressAttribute, ok := attributes["instance_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `instance_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- instanceAddressVal, ok := instanceAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`instance_address expected to be basetypes.StringValue, was: %T`, instanceAddressAttribute))
- }
-
- routerAddressAttribute, ok := attributes["router_address"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `router_address is missing from object`)
-
- return NewNetworkValueUnknown(), diags
- }
-
- routerAddressVal, ok := routerAddressAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`router_address expected to be basetypes.StringValue, was: %T`, routerAddressAttribute))
- }
-
- if diags.HasError() {
- return NewNetworkValueUnknown(), diags
- }
-
- return NetworkValue{
- AccessScope: accessScopeVal,
- Acl: aclVal,
- InstanceAddress: instanceAddressVal,
- RouterAddress: routerAddressVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewNetworkValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) NetworkValue {
- object, diags := NewNetworkValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewNetworkValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t NetworkType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewNetworkValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewNetworkValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewNetworkValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewNetworkValueMust(NetworkValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t NetworkType) ValueType(ctx context.Context) attr.Value {
- return NetworkValue{}
-}
-
-var _ basetypes.ObjectValuable = NetworkValue{}
-
-type NetworkValue struct {
- AccessScope basetypes.StringValue `tfsdk:"access_scope"`
- Acl basetypes.ListValue `tfsdk:"acl"`
- InstanceAddress basetypes.StringValue `tfsdk:"instance_address"`
- RouterAddress basetypes.StringValue `tfsdk:"router_address"`
- state attr.ValueState
-}
-
-func (v NetworkValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["access_scope"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["acl"] = basetypes.ListType{
- ElemType: types.StringType,
- }.TerraformType(ctx)
- attrTypes["instance_address"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["router_address"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.AccessScope.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["access_scope"] = val
-
- val, err = v.Acl.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["acl"] = val
-
- val, err = v.InstanceAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["instance_address"] = val
-
- val, err = v.RouterAddress.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["router_address"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v NetworkValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v NetworkValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v NetworkValue) String() string {
- return "NetworkValue"
-}
-
-func (v NetworkValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- var aclVal basetypes.ListValue
- switch {
- case v.Acl.IsUnknown():
- aclVal = types.ListUnknown(types.StringType)
- case v.Acl.IsNull():
- aclVal = types.ListNull(types.StringType)
- default:
- var d diag.Diagnostics
- aclVal, d = types.ListValue(types.StringType, v.Acl.Elements())
- diags.Append(d...)
- }
-
- if diags.HasError() {
- return types.ObjectUnknown(map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }), diags
- }
-
- attributeTypes := map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "access_scope": v.AccessScope,
- "acl": aclVal,
- "instance_address": v.InstanceAddress,
- "router_address": v.RouterAddress,
- })
-
- return objVal, diags
-}
-
-func (v NetworkValue) Equal(o attr.Value) bool {
- other, ok := o.(NetworkValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.AccessScope.Equal(other.AccessScope) {
- return false
- }
-
- if !v.Acl.Equal(other.Acl) {
- return false
- }
-
- if !v.InstanceAddress.Equal(other.InstanceAddress) {
- return false
- }
-
- if !v.RouterAddress.Equal(other.RouterAddress) {
- return false
- }
-
- return true
-}
-
-func (v NetworkValue) Type(ctx context.Context) attr.Type {
- return NetworkType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v NetworkValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "access_scope": basetypes.StringType{},
- "acl": basetypes.ListType{
- ElemType: types.StringType,
- },
- "instance_address": basetypes.StringType{},
- "router_address": basetypes.StringType{},
- }
-}
-
-var _ basetypes.ObjectTypable = StorageType{}
-
-type StorageType struct {
- basetypes.ObjectType
-}
-
-func (t StorageType) Equal(o attr.Type) bool {
- other, ok := o.(StorageType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t StorageType) String() string {
- return "StorageType"
-}
-
-func (t StorageType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return nil, diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return StorageValue{
- Class: classVal,
- Size: sizeVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageValueNull() StorageValue {
- return StorageValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewStorageValueUnknown() StorageValue {
- return StorageValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewStorageValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (StorageValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing StorageValue Attribute Value",
- "While creating a StorageValue value, a missing attribute value was detected. "+
- "A StorageValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid StorageValue Attribute Type",
- "While creating a StorageValue value, an invalid attribute value was detected. "+
- "A StorageValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("StorageValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("StorageValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra StorageValue Attribute Value",
- "While creating a StorageValue value, an extra attribute value was detected. "+
- "A StorageValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra StorageValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewStorageValueUnknown(), diags
- }
-
- classAttribute, ok := attributes["class"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `class is missing from object`)
-
- return NewStorageValueUnknown(), diags
- }
-
- classVal, ok := classAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`class expected to be basetypes.StringValue, was: %T`, classAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewStorageValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- if diags.HasError() {
- return NewStorageValueUnknown(), diags
- }
-
- return StorageValue{
- Class: classVal,
- Size: sizeVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewStorageValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) StorageValue {
- object, diags := NewStorageValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewStorageValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t StorageType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewStorageValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewStorageValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewStorageValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewStorageValueMust(StorageValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t StorageType) ValueType(ctx context.Context) attr.Value {
- return StorageValue{}
-}
-
-var _ basetypes.ObjectValuable = StorageValue{}
-
-type StorageValue struct {
- Class basetypes.StringValue `tfsdk:"class"`
- Size basetypes.Int64Value `tfsdk:"size"`
- state attr.ValueState
-}
-
-func (v StorageValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 2)
-
- var val tftypes.Value
- var err error
-
- attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 2)
-
- val, err = v.Class.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["class"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v StorageValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v StorageValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v StorageValue) String() string {
- return "StorageValue"
-}
-
-func (v StorageValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "class": v.Class,
- "size": v.Size,
- })
-
- return objVal, diags
-}
-
-func (v StorageValue) Equal(o attr.Value) bool {
- other, ok := o.(StorageValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Class.Equal(other.Class) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- return true
-}
-
-func (v StorageValue) Type(ctx context.Context) attr.Type {
- return StorageType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v StorageValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "class": basetypes.StringType{},
- "size": basetypes.Int64Type{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go b/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go
deleted file mode 100644
index c12bfa5d..00000000
--- a/stackit/internal/services/sqlserverflexbeta/sqlserverflex_acc_test.go
+++ /dev/null
@@ -1,446 +0,0 @@
-package sqlserverflexbeta_test
-
-import (
- "context"
- _ "embed"
- "fmt"
- "os"
- "strconv"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-testing/helper/acctest"
- "github.com/hashicorp/terraform-plugin-testing/helper/resource"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
- sqlserverflexbeta "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
-
- // The fwresource import alias is so there is no collision
- // with the more typical acceptance testing import:
- // "github.com/hashicorp/terraform-plugin-testing/helper/resource"
- fwresource "github.com/hashicorp/terraform-plugin-framework/resource"
-)
-
-const providerPrefix = "stackitprivatepreview_sqlserverflexbeta"
-
-func TestInstanceResourceSchema(t *testing.T) {
- t.Parallel()
-
- ctx := context.Background()
- schemaRequest := fwresource.SchemaRequest{}
- schemaResponse := &fwresource.SchemaResponse{}
-
- // Instantiate the resource.Resource and call its Schema method
- sqlserverflexbeta.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
-
- if schemaResponse.Diagnostics.HasError() {
- t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
- }
-
- // Validate the schema
- diagnostics := schemaResponse.Schema.ValidateImplementation(ctx)
-
- if diagnostics.HasError() {
- t.Fatalf("Schema validation diagnostics: %+v", diagnostics)
- }
-}
-
-func TestMain(m *testing.M) {
- testutils.Setup()
- code := m.Run()
- // shutdown()
- os.Exit(code)
-}
-
-func testAccPreCheck(t *testing.T) {
- if _, ok := os.LookupEnv("TF_ACC_PROJECT_ID"); !ok {
- t.Fatalf("could not find env var TF_ACC_PROJECT_ID")
- }
-}
-
-type resData struct {
- ServiceAccountFilePath string
- ProjectId string
- Region string
- Name string
- TfName string
- FlavorId string
- BackupSchedule string
- UseEncryption bool
- KekKeyId string
- KekKeyRingId string
- KekKeyVersion uint8
- KekServiceAccount string
- PerformanceClass string
- Size uint32
- AclString string
- AccessScope string
- RetentionDays uint32
- Version string
- Users []User
- Databases []Database
-}
-
-type User struct {
- Name string
- ProjectId string
- Roles []string
-}
-
-type Database struct {
- Name string
- ProjectId string
- Owner string
- Collation string
- Compatibility string
-}
-
-func resName(res, name string) string {
- return fmt.Sprintf("%s_%s.%s", providerPrefix, res, name)
-}
-
-func getExample() resData {
- name := acctest.RandomWithPrefix("tf-acc")
- return resData{
- Region: os.Getenv("TF_ACC_REGION"),
- ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Name: name,
- TfName: name,
- FlavorId: "4.16-Single",
- BackupSchedule: "0 0 * * *",
- UseEncryption: false,
- RetentionDays: 33,
- PerformanceClass: "premium-perf2-stackit",
- Size: 10,
- AclString: "0.0.0.0/0",
- AccessScope: "PUBLIC",
- Version: "2022",
- }
-}
-
-func TestAccInstance(t *testing.T) {
- exData := getExample()
-
- updNameData := exData
- updNameData.Name = "name-updated"
-
- updSizeData := exData
- updSizeData.Size = 25
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Update name and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updNameData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", updNameData.Name),
- ),
- },
- // Update size and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- updSizeData,
- ),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- testutils.ResStr(providerPrefix, "instance", exData.TfName),
- "storage.size",
- strconv.Itoa(int(updSizeData.Size)),
- ),
- ),
- },
- {
- RefreshState: true,
- },
- //// Import test
- //{
- // ResourceName: resName("instance", exData.TfName),
- // ImportState: true,
- // ImportStateVerify: true,
- // },
- },
- })
-}
-
-func TestAccInstanceReApply(t *testing.T) {
- exData := getExample()
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", exData.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- {
- RefreshState: true,
- },
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- exData,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- resource.TestCheckResourceAttr(resName("instance", exData.TfName), "name", exData.Name),
- resource.TestCheckResourceAttrSet(resName("instance", exData.TfName), "id"),
- // TODO: check all fields
- ),
- },
- // Import test
- {
- ResourceName: resName("instance", exData.TfName),
- ImportStateKind: resource.ImportBlockWithResourceIdentity,
- ImportState: true,
- // ImportStateVerify is not supported with plannable import blocks
- // ImportStateVerify: true,
- },
- },
- })
-}
-
-func TestAccInstanceNoEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{
- "##STACKIT_DatabaseManager##",
- "##STACKIT_LoginManager##",
- "##STACKIT_ProcessManager##",
- "##STACKIT_SQLAgentManager##",
- "##STACKIT_SQLAgentUser##",
- "##STACKIT_ServerManager##",
- },
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- resource.TestCheckNoResourceAttr(resName("instance", data.TfName), "encryption"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
- // resource.TestCheckResourceAttrSet(resName("user", userName), "roles"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", strconv.Itoa(len(data.Users[0].Roles))),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
-}
-
-func TestAccInstanceEncryption(t *testing.T) {
- data := getExample()
-
- dbName := "testDb"
- userName := "testUser"
- data.Users = []User{
- {
- Name: userName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Roles: []string{"##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"},
- },
- }
- data.Databases = []Database{
- {
- Name: dbName,
- ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
- Owner: userName,
- },
- }
-
- data.UseEncryption = true
- data.KekKeyId = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
- data.KekKeyRingId = "6a2d95ab-3c4c-4963-a2bb-08d17a320e27"
- data.KekKeyVersion = 1
- data.KekServiceAccount = "henselinm-u2v3ex1@sa.stackit.cloud"
-
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- t.Logf(" ... working on instance %s", data.TfName)
- },
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- // Create and verify
- {
- Config: testutils.StringFromTemplateMust(
- "testdata/instance_template.gompl",
- data,
- ),
- Check: resource.ComposeAggregateTestCheckFunc(
- // check instance values are set
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "backup_schedule"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "edition"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "flavor_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "instance_id"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "is_deletable"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "name"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "replicas"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "retention_days"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "status"),
- resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "version"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.instance_address"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.router_address"),
-
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.class"),
- // resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "storage.size"),
-
- // check instance values are correct
- resource.TestCheckResourceAttr(resName("instance", data.TfName), "name", data.Name),
-
- // check user values are set
- resource.TestCheckResourceAttrSet(resName("user", userName), "id"),
- resource.TestCheckResourceAttrSet(resName("user", userName), "username"),
-
- // func(s *terraform.State) error {
- // return nil
- // },
-
- // check user values are correct
- resource.TestCheckResourceAttr(resName("user", userName), "username", userName),
- resource.TestCheckResourceAttr(resName("user", userName), "roles.#", "2"),
-
- // check database values are set
- resource.TestCheckResourceAttrSet(resName("database", dbName), "id"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "name"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "owner"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "compatibility"),
- resource.TestCheckResourceAttrSet(resName("database", dbName), "collation"),
-
- // check database values are correct
- resource.TestCheckResourceAttr(resName("database", dbName), "name", dbName),
- resource.TestCheckResourceAttr(resName("database", dbName), "owner", userName),
- ),
- },
- },
- })
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl b/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl
deleted file mode 100644
index e71f3fa0..00000000
--- a/stackit/internal/services/sqlserverflexbeta/testdata/instance_template.gompl
+++ /dev/null
@@ -1,60 +0,0 @@
-provider "stackitprivatepreview" {
- default_region = "{{ .Region }}"
- service_account_key_path = "{{ .ServiceAccountFilePath }}"
-}
-
-resource "stackitprivatepreview_sqlserverflexbeta_instance" "{{ .TfName }}" {
- project_id = "{{ .ProjectId }}"
- name = "{{ .Name }}"
- backup_schedule = "{{ .BackupSchedule }}"
- retention_days = {{ .RetentionDays }}
- flavor_id = "{{ .FlavorId }}"
- storage = {
- class = "{{ .PerformanceClass }}"
- size = {{ .Size }}
- }
-{{ if .UseEncryption }}
- encryption = {
- kek_key_id = "{{ .KekKeyId }}"
- kek_key_ring_id = "{{ .KekKeyRingId }}"
- kek_key_version = {{ .KekKeyVersion }}
- service_account = "{{ .KekServiceAccount }}"
- }
-{{ end }}
- network = {
- acl = ["{{ .AclString }}"]
- access_scope = "{{ .AccessScope }}"
- }
- version = "{{ .Version }}"
-}
-
-{{ if .Users }}
-{{ $tfName := .TfName }}
-{{ range $user := .Users }}
-resource "stackitprivatepreview_sqlserverflexbeta_user" "{{ $user.Name }}" {
- project_id = "{{ $user.ProjectId }}"
- instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id
- username = "{{ $user.Name }}"
- roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
-}
-{{ end }}
-{{ end }}
-
-{{ if .Databases }}
-{{ $tfName := .TfName }}
-{{ range $db := .Databases }}
-resource "stackitprivatepreview_sqlserverflexbeta_database" "{{ $db.Name }}" {
- depends_on = [stackitprivatepreview_sqlserverflexbeta_user.{{ $db.Owner }}]
- project_id = "{{ $db.ProjectId }}"
- instance_id = stackitprivatepreview_sqlserverflexbeta_instance.{{ $tfName }}.instance_id
- name = "{{ $db.Name }}"
- owner = "{{ $db.Owner }}"
-{{ if $db.Collation }}
- collation = "{{ $db.Collation }}"
-{{ end }}
-{{ if $db.Compatibility }}
- compatibility = "{{ $db.Compatibility }}"
-{{ end }}
-}
-{{ end }}
-{{ end }}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasource.go b/stackit/internal/services/sqlserverflexbeta/user/datasource.go
deleted file mode 100644
index 68a20378..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/datasource.go
+++ /dev/null
@@ -1,140 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- sqlserverflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- sqlserverflexbetaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/datasources_gen"
-)
-
-var _ datasource.DataSource = (*userDataSource)(nil)
-
-func NewUserDataSource() datasource.DataSource {
- return &userDataSource{}
-}
-
-type dataSourceModel struct {
- DefaultDatabase types.String `tfsdk:"default_database"`
- Host types.String `tfsdk:"host"`
- Id types.String `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Port types.Int64 `tfsdk:"port"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- UserId types.Int64 `tfsdk:"user_id"`
- Username types.String `tfsdk:"username"`
-}
-
-type userDataSource struct {
- client *v3beta1api.APIClient
- providerData core.ProviderData
-}
-
-func (d *userDataSource) Metadata(
- _ context.Context,
- req datasource.MetadataRequest,
- resp *datasource.MetadataResponse,
-) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
-}
-
-func (d *userDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = sqlserverflexbetaGen.UserDataSourceSchema(ctx)
-}
-
-// Configure adds the provider configured client to the data source.
-func (d *userDataSource) Configure(
- ctx context.Context,
- req datasource.ConfigureRequest,
- resp *datasource.ConfigureResponse,
-) {
- var ok bool
- d.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexUtils.ConfigureClient(ctx, &d.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- d.client = apiClient
- tflog.Info(ctx, "SQL SERVER Flex alpha database client configured")
-}
-
-func (d *userDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var model dataSourceModel
- diags := req.Config.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := d.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- recordSetResp, err := d.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- utils.LogError(
- ctx,
- &resp.Diagnostics,
- err,
- "Reading user",
- fmt.Sprintf(
- "User with ID %q or instance with ID %q does not exist in project %q.",
- userId,
- instanceId,
- projectId,
- ),
- map[int]string{
- http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
- },
- )
- resp.State.RemoveResource(ctx)
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema and populate Computed attribute values
- err = mapDataSourceFields(recordSetResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex beta instance read")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
deleted file mode 100644
index 34aef9ca..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/datasources_gen/user_data_source_gen.go
+++ /dev/null
@@ -1,1118 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func UserDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "instance_id": schema.StringAttribute{
- Required: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "page": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of the page of items list to be returned.",
- MarkdownDescription: "Number of the page of items list to be returned.",
- },
- "pagination": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "page": schema.Int64Attribute{
- Computed: true,
- },
- "size": schema.Int64Attribute{
- Computed: true,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- },
- "total_pages": schema.Int64Attribute{
- Computed: true,
- },
- "total_rows": schema.Int64Attribute{
- Computed: true,
- },
- },
- CustomType: PaginationType{
- ObjectType: types.ObjectType{
- AttrTypes: PaginationValue{}.AttributeTypes(ctx),
- },
- },
- Computed: true,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "size": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "Number of items to be returned on each page.",
- MarkdownDescription: "Number of items to be returned on each page.",
- },
- "sort": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "Sorting of the users to be returned on each page.",
- MarkdownDescription: "Sorting of the users to be returned on each page.",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "id.asc",
- "id.desc",
- "index.desc",
- "index.asc",
- "name.desc",
- "name.asc",
- "status.desc",
- "status.asc",
- ),
- },
- },
- "users": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "tf_original_api_id": schema.Int64Attribute{
- Computed: true,
- Description: "The ID of the user.",
- MarkdownDescription: "The ID of the user.",
- },
- "status": schema.StringAttribute{
- Computed: true,
- Description: "The current status of the user.",
- MarkdownDescription: "The current status of the user.",
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: "The name of the user.",
- MarkdownDescription: "The name of the user.",
- },
- },
- CustomType: UsersType{
- ObjectType: types.ObjectType{
- AttrTypes: UsersValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "List of all users inside an instance",
- MarkdownDescription: "List of all users inside an instance",
- },
- },
- }
-}
-
-type UserModel struct {
- InstanceId types.String `tfsdk:"instance_id"`
- Page types.Int64 `tfsdk:"page"`
- Pagination PaginationValue `tfsdk:"pagination"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Size types.Int64 `tfsdk:"size"`
- Sort types.String `tfsdk:"sort"`
- Users types.List `tfsdk:"users"`
-}
-
-var _ basetypes.ObjectTypable = PaginationType{}
-
-type PaginationType struct {
- basetypes.ObjectType
-}
-
-func (t PaginationType) Equal(o attr.Type) bool {
- other, ok := o.(PaginationType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t PaginationType) String() string {
- return "PaginationType"
-}
-
-func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return nil, diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return nil, diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return nil, diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return nil, diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return nil, diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueNull() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewPaginationValueUnknown() PaginationValue {
- return PaginationValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (PaginationValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing PaginationValue Attribute Value",
- "While creating a PaginationValue value, a missing attribute value was detected. "+
- "A PaginationValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid PaginationValue Attribute Type",
- "While creating a PaginationValue value, an invalid attribute value was detected. "+
- "A PaginationValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("PaginationValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra PaginationValue Attribute Value",
- "While creating a PaginationValue value, an extra attribute value was detected. "+
- "A PaginationValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra PaginationValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- pageAttribute, ok := attributes["page"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `page is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- pageVal, ok := pageAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
- }
-
- sizeAttribute, ok := attributes["size"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `size is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
- }
-
- sortAttribute, ok := attributes["sort"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `sort is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- sortVal, ok := sortAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`sort expected to be basetypes.StringValue, was: %T`, sortAttribute))
- }
-
- totalPagesAttribute, ok := attributes["total_pages"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_pages is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
- }
-
- totalRowsAttribute, ok := attributes["total_rows"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `total_rows is missing from object`)
-
- return NewPaginationValueUnknown(), diags
- }
-
- totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
- }
-
- if diags.HasError() {
- return NewPaginationValueUnknown(), diags
- }
-
- return PaginationValue{
- Page: pageVal,
- Size: sizeVal,
- Sort: sortVal,
- TotalPages: totalPagesVal,
- TotalRows: totalRowsVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewPaginationValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) PaginationValue {
- object, diags := NewPaginationValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewPaginationValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t PaginationType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewPaginationValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewPaginationValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewPaginationValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewPaginationValueMust(PaginationValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t PaginationType) ValueType(ctx context.Context) attr.Value {
- return PaginationValue{}
-}
-
-var _ basetypes.ObjectValuable = PaginationValue{}
-
-type PaginationValue struct {
- Page basetypes.Int64Value `tfsdk:"page"`
- Size basetypes.Int64Value `tfsdk:"size"`
- Sort basetypes.StringValue `tfsdk:"sort"`
- TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
- TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
- state attr.ValueState
-}
-
-func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 5)
-
- var val tftypes.Value
- var err error
-
- attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 5)
-
- val, err = v.Page.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["page"] = val
-
- val, err = v.Size.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["size"] = val
-
- val, err = v.Sort.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["sort"] = val
-
- val, err = v.TotalPages.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_pages"] = val
-
- val, err = v.TotalRows.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["total_rows"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v PaginationValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v PaginationValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v PaginationValue) String() string {
- return "PaginationValue"
-}
-
-func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "page": v.Page,
- "size": v.Size,
- "sort": v.Sort,
- "total_pages": v.TotalPages,
- "total_rows": v.TotalRows,
- })
-
- return objVal, diags
-}
-
-func (v PaginationValue) Equal(o attr.Value) bool {
- other, ok := o.(PaginationValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Page.Equal(other.Page) {
- return false
- }
-
- if !v.Size.Equal(other.Size) {
- return false
- }
-
- if !v.Sort.Equal(other.Sort) {
- return false
- }
-
- if !v.TotalPages.Equal(other.TotalPages) {
- return false
- }
-
- if !v.TotalRows.Equal(other.TotalRows) {
- return false
- }
-
- return true
-}
-
-func (v PaginationValue) Type(ctx context.Context) attr.Type {
- return PaginationType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "page": basetypes.Int64Type{},
- "size": basetypes.Int64Type{},
- "sort": basetypes.StringType{},
- "total_pages": basetypes.Int64Type{},
- "total_rows": basetypes.Int64Type{},
- }
-}
-
-var _ basetypes.ObjectTypable = UsersType{}
-
-type UsersType struct {
- basetypes.ObjectType
-}
-
-func (t UsersType) Equal(o attr.Type) bool {
- other, ok := o.(UsersType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t UsersType) String() string {
- return "UsersType"
-}
-
-func (t UsersType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return nil, diags
- }
-
- idVal, ok := idAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
- }
-
- statusAttribute, ok := attributes["status"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `status is missing from object`)
-
- return nil, diags
- }
-
- statusVal, ok := statusAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
- }
-
- usernameAttribute, ok := attributes["username"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `username is missing from object`)
-
- return nil, diags
- }
-
- usernameVal, ok := usernameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`username expected to be basetypes.StringValue, was: %T`, usernameAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return UsersValue{
- Id: idVal,
- Status: statusVal,
- Username: usernameVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewUsersValueNull() UsersValue {
- return UsersValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewUsersValueUnknown() UsersValue {
- return UsersValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewUsersValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (UsersValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing UsersValue Attribute Value",
- "While creating a UsersValue value, a missing attribute value was detected. "+
- "A UsersValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("UsersValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid UsersValue Attribute Type",
- "While creating a UsersValue value, an invalid attribute value was detected. "+
- "A UsersValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("UsersValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("UsersValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra UsersValue Attribute Value",
- "While creating a UsersValue value, an extra attribute value was detected. "+
- "A UsersValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra UsersValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewUsersValueUnknown(), diags
- }
-
- idAttribute, ok := attributes["id"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `id is missing from object`)
-
- return NewUsersValueUnknown(), diags
- }
-
- idVal, ok := idAttribute.(basetypes.Int64Value)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`id expected to be basetypes.Int64Value, was: %T`, idAttribute))
- }
-
- statusAttribute, ok := attributes["status"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `status is missing from object`)
-
- return NewUsersValueUnknown(), diags
- }
-
- statusVal, ok := statusAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`status expected to be basetypes.StringValue, was: %T`, statusAttribute))
- }
-
- usernameAttribute, ok := attributes["username"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `username is missing from object`)
-
- return NewUsersValueUnknown(), diags
- }
-
- usernameVal, ok := usernameAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`username expected to be basetypes.StringValue, was: %T`, usernameAttribute))
- }
-
- if diags.HasError() {
- return NewUsersValueUnknown(), diags
- }
-
- return UsersValue{
- Id: idVal,
- Status: statusVal,
- Username: usernameVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewUsersValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) UsersValue {
- object, diags := NewUsersValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewUsersValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t UsersType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewUsersValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewUsersValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewUsersValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewUsersValueMust(UsersValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t UsersType) ValueType(ctx context.Context) attr.Value {
- return UsersValue{}
-}
-
-var _ basetypes.ObjectValuable = UsersValue{}
-
-type UsersValue struct {
- Id basetypes.Int64Value `tfsdk:"id"`
- Status basetypes.StringValue `tfsdk:"status"`
- Username basetypes.StringValue `tfsdk:"username"`
- state attr.ValueState
-}
-
-func (v UsersValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 3)
-
- var val tftypes.Value
- var err error
-
- attrTypes["id"] = basetypes.Int64Type{}.TerraformType(ctx)
- attrTypes["status"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["username"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 3)
-
- val, err = v.Id.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["id"] = val
-
- val, err = v.Status.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["status"] = val
-
- val, err = v.Username.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["username"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v UsersValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v UsersValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v UsersValue) String() string {
- return "UsersValue"
-}
-
-func (v UsersValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "id": basetypes.Int64Type{},
- "status": basetypes.StringType{},
- "username": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "id": v.Id,
- "status": v.Status,
- "username": v.Username,
- })
-
- return objVal, diags
-}
-
-func (v UsersValue) Equal(o attr.Value) bool {
- other, ok := o.(UsersValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Id.Equal(other.Id) {
- return false
- }
-
- if !v.Status.Equal(other.Status) {
- return false
- }
-
- if !v.Username.Equal(other.Username) {
- return false
- }
-
- return true
-}
-
-func (v UsersValue) Type(ctx context.Context) attr.Type {
- return UsersType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v UsersValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "id": basetypes.Int64Type{},
- "status": basetypes.StringType{},
- "username": basetypes.StringType{},
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper.go b/stackit/internal/services/sqlserverflexbeta/user/mapper.go
deleted file mode 100644
index 73d9e6c0..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/mapper.go
+++ /dev/null
@@ -1,194 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "fmt"
- "slices"
- "strconv"
-
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-// mapDataSourceFields maps the API response to a dataSourceModel.
-func mapDataSourceFields(userResp *v3beta1api.GetUserResponse, model *dataSourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != 0 {
- userId = user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = utils.BuildInternalTerraformId(
- model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
- )
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringValue(user.Username)
-
- // Map roles
- if user.Roles == nil {
- model.Roles = types.List(types.SetNull(types.StringType))
- } else {
- var roles []attr.Value
- resRoles := user.Roles
- slices.Sort(resRoles)
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesSet, diags := types.SetValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = types.List(rolesSet)
- }
-
- // Set remaining attributes
- model.Host = types.StringValue(user.Host)
- model.Port = types.Int64Value(int64(user.Port))
- model.Region = types.StringValue(region)
- model.Status = types.StringValue(user.Status)
- model.DefaultDatabase = types.StringValue(user.DefaultDatabase)
-
- return nil
-}
-
-// mapFields maps the API response to a resourceModel.
-func mapFields(userResp *v3beta1api.GetUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- // Handle user ID
- var userId int64
- if model.UserId.ValueInt64() != 0 {
- userId = model.UserId.ValueInt64()
- } else if user.Id != 0 {
- userId = user.Id
- } else {
- return fmt.Errorf("user id not present")
- }
-
- // Set main attributes
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringValue(user.Username)
-
- // Map roles
- if userResp.Roles != nil {
- resRoles := userResp.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
-
- rolesSet, diags := types.ListValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = rolesSet
- }
-
- // Ensure roles is not null
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- // Set connection details
- model.Host = types.StringValue(user.Host)
- model.Port = types.Int64Value(int64(user.Port))
- model.Region = types.StringValue(region)
- return nil
-}
-
-// mapFieldsCreate maps the API response from creating a user to a resourceModel.
-func mapFieldsCreate(userResp *v3beta1api.CreateUserResponse, model *resourceModel, region string) error {
- if userResp == nil {
- return fmt.Errorf("response is nil")
- }
- if model == nil {
- return fmt.Errorf("model input is nil")
- }
- user := userResp
-
- userId := user.Id
- model.Id = types.Int64Value(userId)
- model.UserId = types.Int64Value(userId)
- model.Username = types.StringValue(user.Username)
-
- model.Password = types.StringValue(user.Password)
-
- if user.Roles != nil {
- resRoles := user.Roles
- slices.Sort(resRoles)
-
- var roles []attr.Value
- for _, role := range resRoles {
- roles = append(roles, types.StringValue(string(role)))
- }
- rolesList, diags := types.ListValue(types.StringType, roles)
- if diags.HasError() {
- return fmt.Errorf("failed to map roles: %w", core.DiagsToError(diags))
- }
- model.Roles = rolesList
- }
-
- if model.Roles.IsNull() || model.Roles.IsUnknown() {
- model.Roles = types.List(types.SetNull(types.StringType))
- }
-
- model.Password = types.StringValue(user.Password)
- model.Uri = types.StringValue(user.Uri)
-
- model.Host = types.StringValue(user.Host)
- model.Port = types.Int64Value(int64(user.Port))
- model.Region = types.StringValue(region)
- model.Status = types.StringValue(user.Status)
- model.DefaultDatabase = types.StringValue(user.DefaultDatabase)
- model.Uri = types.StringValue(user.Uri)
-
- return nil
-}
-
-// toCreatePayload converts a resourceModel to an API CreateUserRequestPayload.
-func toCreatePayload(
- model *resourceModel,
- roles []string,
-) (*v3beta1api.CreateUserRequestPayload, error) {
- if model == nil {
- return nil, fmt.Errorf("nil model")
- }
-
- pl := v3beta1api.CreateUserRequestPayload{
- Username: model.Username.ValueString(),
- Roles: roles,
- }
- slices.Sort(roles)
- if !model.DefaultDatabase.IsNull() || !model.DefaultDatabase.IsUnknown() {
- pl.DefaultDatabase = conversion.StringValueToPointer(model.DefaultDatabase)
- }
-
- return &pl, nil
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go b/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go
deleted file mode 100644
index be27c3e1..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/mapper_test.go
+++ /dev/null
@@ -1,525 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- sqlserverflexbeta "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-)
-
-func TestMapDataSourceFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexbeta.GetUserResponse
- region string
- expected dataSourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringValue(""),
- Port: types.Int64Value(0),
- Region: types.StringValue(testRegion),
- Status: types.StringValue(""),
- DefaultDatabase: types.StringValue(""),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexbeta.GetUserResponse{
- Roles: []string{
- "role_1",
- "role_2",
- "",
- },
- Username: ("username"),
- Host: ("host"),
- Port: (int32(1234)),
- Status: ("active"),
- DefaultDatabase: ("default_db"),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("active"),
- DefaultDatabase: types.StringValue("default_db"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexbeta.GetUserResponse{
- Id: (int64(1)),
- Roles: []string{},
- Username: "",
- Host: "",
- Port: (int32(2123456789)),
- },
- testRegion,
- dataSourceModel{
- Id: types.StringValue("pid,region,iid,1"),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringValue(""),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringValue(""),
- Status: types.StringValue(""),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- dataSourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &dataSourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapDataSourceFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFieldsCreate(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexbeta.CreateUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexbeta.CreateUserResponse{
- Id: int64(1),
- Password: "",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Password: types.StringValue(""),
- Host: types.StringValue(""),
- Port: types.Int64Value(0),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringValue(""),
- Status: types.StringValue(""),
- Uri: types.StringValue(""),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexbeta.CreateUserResponse{
- Id: int64(2),
- Roles: []string{
- "role_1",
- "role_2",
- "",
- },
- Username: "username",
- Password: "password",
- Host: "host",
- Port: int32(1234),
- Status: "status",
- DefaultDatabase: "default_db",
- Uri: "myURI",
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Password: types.StringValue("password"),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- Status: types.StringValue("status"),
- DefaultDatabase: types.StringValue("default_db"),
- Uri: types.StringValue("myURI"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexbeta.CreateUserResponse{
- Id: (int64(3)),
- Roles: []string{},
- Username: "",
- Password: (""),
- Host: "",
- Port: (int32(2123456789)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(3),
- UserId: types.Int64Value(3),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Password: types.StringValue(""),
- Host: types.StringValue(""),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- DefaultDatabase: types.StringValue(""),
- Status: types.StringValue(""),
- Uri: types.StringValue(""),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- //{
- // "nil_response_2",
- // &sqlserverflexbeta.CreateUserResponse{},
- // testRegion,
- // resourceModel{},
- // false,
- // },
- //{
- // "no_resource_id",
- // &sqlserverflexbeta.CreateUserResponse{},
- // testRegion,
- // resourceModel{},
- // false,
- // },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- }
- err := mapFieldsCreate(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestMapFields(t *testing.T) {
- const testRegion = "region"
- tests := []struct {
- description string
- input *sqlserverflexbeta.GetUserResponse
- region string
- expected resourceModel
- isValid bool
- }{
- {
- "default_values",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetNull(types.StringType)),
- Host: types.StringValue(""),
- Port: types.Int64Value(0),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "simple_values",
- &sqlserverflexbeta.GetUserResponse{
- Roles: []string{
- "role_2",
- "role_1",
- "",
- },
- Username: ("username"),
- Host: ("host"),
- Port: (int32(1234)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(2),
- UserId: types.Int64Value(2),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue("username"),
- Roles: types.List(
- types.SetValueMust(
- types.StringType, []attr.Value{
- types.StringValue(""),
- types.StringValue("role_1"),
- types.StringValue("role_2"),
- },
- ),
- ),
- Host: types.StringValue("host"),
- Port: types.Int64Value(1234),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &sqlserverflexbeta.GetUserResponse{
- Id: (int64(1)),
- Roles: []string{},
- Username: "",
- Host: "",
- Port: (int32(2123456789)),
- },
- testRegion,
- resourceModel{
- Id: types.Int64Value(1),
- UserId: types.Int64Value(1),
- InstanceId: types.StringValue("iid"),
- ProjectId: types.StringValue("pid"),
- Username: types.StringValue(""),
- Roles: types.List(types.SetValueMust(types.StringType, []attr.Value{})),
- Host: types.StringValue(""),
- Port: types.Int64Value(2123456789),
- Region: types.StringValue(testRegion),
- },
- true,
- },
- {
- "nil_response",
- nil,
- testRegion,
- resourceModel{},
- false,
- },
- {
- "nil_response_2",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- {
- "no_resource_id",
- &sqlserverflexbeta.GetUserResponse{},
- testRegion,
- resourceModel{},
- false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- state := &resourceModel{
- ProjectId: tt.expected.ProjectId,
- InstanceId: tt.expected.InstanceId,
- UserId: tt.expected.UserId,
- }
- err := mapFields(tt.input, state, tt.region)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(&tt.expected, state)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
-
-func TestToCreatePayload(t *testing.T) {
- tests := []struct {
- description string
- input *resourceModel
- inputRoles []string
- expected *sqlserverflexbeta.CreateUserRequestPayload
- isValid bool
- }{
- {
- "default_values",
- &resourceModel{},
- []string{},
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: []string{},
- Username: "",
- },
- true,
- },
- {
- "default_values",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{
- "role_1",
- "role_2",
- },
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: []string{
- "role_1",
- "role_2",
- },
- Username: ("username"),
- },
- true,
- },
- {
- "null_fields_and_int_conversions",
- &resourceModel{
- Username: types.StringValue(""),
- },
- []string{
- "",
- },
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: []string{
- "",
- },
- Username: "",
- },
- true,
- },
- {
- "nil_model",
- nil,
- []string{},
- nil,
- false,
- },
- {
- "nil_roles",
- &resourceModel{
- Username: types.StringValue("username"),
- },
- []string{},
- &sqlserverflexbeta.CreateUserRequestPayload{
- Roles: []string{},
- Username: ("username"),
- },
- true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.description, func(t *testing.T) {
- output, err := toCreatePayload(tt.input, tt.inputRoles)
- if !tt.isValid && err == nil {
- t.Fatalf("Should have failed")
- }
- if tt.isValid && err != nil {
- t.Fatalf("Should not have failed: %v", err)
- }
- if tt.isValid {
- diff := cmp.Diff(output, tt.expected)
- if diff != "" {
- t.Fatalf("Data does not match: %s", diff)
- }
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml b/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml
deleted file mode 100644
index 43b029e8..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/planModifiers.yaml
+++ /dev/null
@@ -1,53 +0,0 @@
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'instance_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'project_id'
- validators:
- - validate.NoSeparator
- - validate.UUID
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'region'
- modifiers:
- - 'RequiresReplace'
- - 'RequiresReplace'
-
- - name: 'user_id'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'username'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'roles'
- modifiers:
- - 'UseStateForUnknown'
- - 'RequiresReplace'
-
- - name: 'password'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'uri'
- modifiers:
- - 'UseStateForUnknown'
-
- - name: 'status'
- modifiers:
- - 'UseStateForUnknown'
diff --git a/stackit/internal/services/sqlserverflexbeta/user/resource.go b/stackit/internal/services/sqlserverflexbeta/user/resource.go
deleted file mode 100644
index 0c04f31b..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/resource.go
+++ /dev/null
@@ -1,578 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- _ "embed"
- "errors"
- "fmt"
- "net/http"
- "slices"
- "strconv"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
-
- sqlserverflexbeta "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
- sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
- sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-
- sqlserverflexbetaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen"
-)
-
-var (
- _ resource.Resource = &userResource{}
- _ resource.ResourceWithConfigure = &userResource{}
- _ resource.ResourceWithImportState = &userResource{}
- _ resource.ResourceWithModifyPlan = &userResource{}
- _ resource.ResourceWithIdentity = &userResource{}
- _ resource.ResourceWithValidateConfig = &userResource{}
-)
-
-func NewUserResource() resource.Resource {
- return &userResource{}
-}
-
-// resourceModel describes the resource data model.
-type resourceModel = sqlserverflexbetaResGen.UserModel
-
-// UserResourceIdentityModel describes the resource's identity attributes.
-type UserResourceIdentityModel struct {
- ProjectID types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- InstanceID types.String `tfsdk:"instance_id"`
- UserID types.Int64 `tfsdk:"user_id"`
-}
-
-type userResource struct {
- client *sqlserverflexbeta.APIClient
- providerData core.ProviderData
-}
-
-func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
-}
-
-// Configure adds the provider configured client to the resource.
-func (r *userResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- var ok bool
- r.providerData, ok = conversion.ParseProviderData(ctx, req.ProviderData, &resp.Diagnostics)
- if !ok {
- return
- }
-
- apiClient := sqlserverflexbetaUtils.ConfigureClient(ctx, &r.providerData, &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
- r.client = apiClient
- tflog.Info(ctx, "SQLServer Beta Flex user client configured")
-}
-
-// ModifyPlan implements resource.ResourceWithModifyPlan.
-// Use the modifier to set the effective region in the current plan.
-func (r *userResource) ModifyPlan(
- ctx context.Context,
- req resource.ModifyPlanRequest,
- resp *resource.ModifyPlanResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var configModel resourceModel
- // skip initial empty configuration to avoid follow-up errors
- if req.Config.Raw.IsNull() {
- return
- }
- resp.Diagnostics.Append(req.Config.Get(ctx, &configModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var planModel resourceModel
- resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- utils.AdaptRegion(ctx, configModel.Region, &planModel.Region, r.providerData.GetRegion(), resp)
- if resp.Diagnostics.HasError() {
- return
- }
-
- //// TODO: verify if this is needed - START
- // var planRoles []string
- // diags := planModel.Roles.ElementsAs(ctx, &planRoles, false)
- // resp.Diagnostics.Append(diags...)
- // if diags.HasError() {
- // return
- //}
- // slices.Sort(planRoles)
- // var roles []attr.Value
- // for _, role := range planRoles {
- // roles = append(roles, types.StringValue(string(role)))
- //}
- // rolesSet, diags := types.ListValue(types.StringType, roles)
- // resp.Diagnostics.Append(diags...)
- // if diags.HasError() {
- // return
- //}
- // planModel.Roles = rolesSet
- //// TODO: verify if this is needed - END
-
- resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
- if resp.Diagnostics.HasError() {
- return
- }
-}
-
-//go:embed planModifiers.yaml
-var modifiersFileByte []byte
-
-// Schema defines the schema for the resource.
-func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
- s := sqlserverflexbetaResGen.UserResourceSchema(ctx)
-
- fields, err := utils.ReadModifiersConfig(modifiersFileByte)
- if err != nil {
- resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
- return
- }
-
- err = utils.AddPlanModifiersToResourceSchema(fields, &s)
- if err != nil {
- resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
- return
- }
- resp.Schema = s
-}
-
-// IdentitySchema defines the schema for the resource's identity attributes.
-func (r *userResource) IdentitySchema(
- _ context.Context,
- _ resource.IdentitySchemaRequest,
- response *resource.IdentitySchemaResponse,
-) {
- response.IdentitySchema = identityschema.Schema{
- Attributes: map[string]identityschema.Attribute{
- "project_id": identityschema.StringAttribute{
- RequiredForImport: true, // must be set during import by the practitioner
- },
- "region": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "instance_id": identityschema.StringAttribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- "user_id": identityschema.Int64Attribute{
- RequiredForImport: true, // can be defaulted by the provider configuration
- },
- },
- }
-}
-
-func (r *userResource) ValidateConfig(
- ctx context.Context,
- req resource.ValidateConfigRequest,
- resp *resource.ValidateConfigResponse,
-) {
- var data resourceModel
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- var roles []string
- diags := data.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if diags.HasError() {
- return
- }
-
- var resRoles []string
- for _, role := range roles {
- if slices.Contains(resRoles, role) {
- resp.Diagnostics.AddAttributeError(
- path.Root("roles"),
- "Attribute Configuration Error",
- "defined roles MUST NOT contain duplicates",
- )
- return
- }
- resRoles = append(resRoles, role)
- }
-}
-
-// Create creates the resource and sets the initial Terraform state.
-func (r *userResource) Create(
- ctx context.Context,
- req resource.CreateRequest,
- resp *resource.CreateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.Plan.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- region := model.Region.ValueString()
-
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "region", region)
-
- var roles []string
- if !model.Roles.IsNull() && !model.Roles.IsUnknown() {
- diags = model.Roles.ElementsAs(ctx, &roles, false)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- slices.Sort(roles)
- }
-
- // Generate API request body from model
- payload, err := toCreatePayload(&model, roles)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
- return
- }
- // Create new user
- userResp, err := r.client.DefaultAPI.CreateUserRequest(
- ctx,
- projectId,
- region,
- instanceId,
- ).CreateUserRequestPayload(*payload).Execute()
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- if userResp == nil || userResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- "API didn't return user Id. A user might have been created",
- )
- return
- }
-
- userId := userResp.Id
- ctx = tflog.SetField(ctx, "user_id", userId)
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- err = mapFieldsCreate(userResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- waitResp, err := sqlserverflexbetaWait.CreateUserWaitHandler(
- ctx,
- r.client.DefaultAPI,
- projectId,
- instanceId,
- region,
- userId,
- ).SetSleepBeforeWait(
- 90 * time.Second,
- ).SetTimeout(
- 90 * time.Minute,
- ).WaitWithContext(ctx)
-
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- fmt.Sprintf("Instance creation waiting: %v", err),
- )
- return
- }
-
- if waitResp.Id == 0 {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "create user",
- "Instance creation waiting: returned id is nil",
- )
- return
- }
-
- // Map response body to schema
- err = mapFields(waitResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error creating user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
- // Set state to fully populated data
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex user created")
-}
-
-// Read refreshes the Terraform state with the latest data.
-func (r *userResource) Read(
- ctx context.Context,
- req resource.ReadRequest,
- resp *resource.ReadResponse,
-) { // nolint:gocritic // function signature required by Terraform
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := r.providerData.GetRegionWithOverride(model.Region)
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(
- err,
- &oapiErr,
- )
- //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if ok && oapiErr.StatusCode == http.StatusNotFound {
- resp.State.RemoveResource(ctx)
- return
- }
- core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading user", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- // Map response body to schema
- err = mapFields(recordSetResp, &model, region)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error reading user",
- fmt.Sprintf("Processing API payload: %v", err),
- )
- return
- }
-
- // Set data returned by API in identity
- identity := UserResourceIdentityModel{
- ProjectID: types.StringValue(projectId),
- Region: types.StringValue(region),
- InstanceID: types.StringValue(instanceId),
- UserID: types.Int64Value(userId),
- }
- resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- // Set refreshed state
- diags = resp.State.Set(ctx, model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
- tflog.Info(ctx, "SQLServer Flex user read")
-}
-
-// Update updates the resource and sets the updated Terraform state on success.
-func (r *userResource) Update(
- ctx context.Context,
- _ resource.UpdateRequest,
- resp *resource.UpdateResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // Update shouldn't be called
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error updating user",
- "an SQL server user can not be updated, only created",
- )
-}
-
-// Delete deletes the resource and removes the Terraform state on success.
-func (r *userResource) Delete(
- ctx context.Context,
- req resource.DeleteRequest,
- resp *resource.DeleteResponse,
-) { // nolint:gocritic // function signature required by Terraform
- // Retrieve values from plan
- var model resourceModel
- diags := req.State.Get(ctx, &model)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- ctx = core.InitProviderContext(ctx)
-
- projectId := model.ProjectId.ValueString()
- instanceId := model.InstanceId.ValueString()
- userId := model.UserId.ValueInt64()
- region := model.Region.ValueString()
- ctx = tflog.SetField(ctx, "project_id", projectId)
- ctx = tflog.SetField(ctx, "instance_id", instanceId)
- ctx = tflog.SetField(ctx, "user_id", userId)
- ctx = tflog.SetField(ctx, "region", region)
-
- // Delete existing record set
- // err := r.client.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- err := r.client.DefaultAPI.DeleteUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- // TODO err handling
- return
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- resp.State.RemoveResource(ctx)
- return
- // case http.StatusInternalServerError:
- // tflog.Warn(ctx, "[delete user] Wait handler got error 500")
- // return false, nil, nil
- default:
- // TODO err handling
- return
- }
- }
- // Delete existing record set
- _, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client.DefaultAPI, projectId, region, instanceId, userId).
- WaitWithContext(ctx)
- if err != nil {
- core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
- return
- }
-
- ctx = core.LogResponse(ctx)
-
- resp.State.RemoveResource(ctx)
-
- tflog.Info(ctx, "SQLServer Flex user deleted")
-}
-
-// ImportState imports a resource into the Terraform state on success.
-// The expected format of the resource import identifier is: project_id,zone_id,record_set_id
-func (r *userResource) ImportState(
- ctx context.Context,
- req resource.ImportStateRequest,
- resp *resource.ImportStateResponse,
-) {
- ctx = core.InitProviderContext(ctx)
-
- if req.ID != "" {
- idParts := strings.Split(req.ID, core.Separator)
-
- if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
- core.LogAndAddError(
- ctx, &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf(
- "Expected import identifier with format [project_id],[region],[instance_id],[user_id], got %q",
- req.ID,
- ),
- )
- return
- }
-
- userId, err := strconv.ParseInt(idParts[3], 10, 64)
- if err != nil {
- core.LogAndAddError(
- ctx,
- &resp.Diagnostics,
- "Error importing user",
- fmt.Sprintf("Invalid user_id format: %q. It must be a valid integer.", idParts[3]),
- )
- return
- }
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- tflog.Info(ctx, "SQLServer Flex user state imported")
-
- return
- }
-
- // If no ID is provided, attempt to read identity attributes from the import configuration
- var identityData UserResourceIdentityModel
- resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- projectId := identityData.ProjectID.ValueString()
- region := identityData.Region.ValueString()
- instanceId := identityData.InstanceID.ValueString()
- userId := identityData.UserID.ValueInt64()
-
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
- resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
-
- core.LogAndAddWarning(
- ctx,
- &resp.Diagnostics,
- "SQLServer Flex user imported with empty password",
- "The user password is not imported as it is only available upon creation of a new user. The password field will be empty.",
- )
- tflog.Info(ctx, "SQLServer Flex user state imported")
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go b/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go
deleted file mode 100644
index f181f79c..00000000
--- a/stackit/internal/services/sqlserverflexbeta/user/resources_gen/user_resource_gen.go
+++ /dev/null
@@ -1,111 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
-
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
-)
-
-func UserResourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "default_database": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The default database for a user of the instance.",
- MarkdownDescription: "The default database for a user of the instance.",
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: "The host of the instance in which the user belongs to.",
- MarkdownDescription: "The host of the instance in which the user belongs to.",
- },
- "id": schema.Int64Attribute{
- Computed: true,
- Description: "The ID of the user.",
- MarkdownDescription: "The ID of the user.",
- },
- "instance_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The ID of the instance.",
- MarkdownDescription: "The ID of the instance.",
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: "The password for the user.",
- MarkdownDescription: "The password for the user.",
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: "The port of the instance in which the user belongs to.",
- MarkdownDescription: "The port of the instance in which the user belongs to.",
- },
- "project_id": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Optional: true,
- Computed: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "roles": schema.ListAttribute{
- ElementType: types.StringType,
- Required: true,
- Description: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.",
- MarkdownDescription: "A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.",
- },
- "status": schema.StringAttribute{
- Computed: true,
- Description: "The current status of the user.",
- MarkdownDescription: "The current status of the user.",
- },
- "uri": schema.StringAttribute{
- Computed: true,
- Description: "The connection string for the user to the instance.",
- MarkdownDescription: "The connection string for the user to the instance.",
- },
- "user_id": schema.Int64Attribute{
- Optional: true,
- Computed: true,
- Description: "The ID of the user.",
- MarkdownDescription: "The ID of the user.",
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: "The name of the user.",
- MarkdownDescription: "The name of the user.",
- },
- },
- }
-}
-
-type UserModel struct {
- DefaultDatabase types.String `tfsdk:"default_database"`
- Host types.String `tfsdk:"host"`
- Id types.Int64 `tfsdk:"id"`
- InstanceId types.String `tfsdk:"instance_id"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Roles types.List `tfsdk:"roles"`
- Status types.String `tfsdk:"status"`
- Uri types.String `tfsdk:"uri"`
- UserId types.Int64 `tfsdk:"user_id"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util.go b/stackit/internal/services/sqlserverflexbeta/utils/util.go
deleted file mode 100644
index cdb3e4d8..00000000
--- a/stackit/internal/services/sqlserverflexbeta/utils/util.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package utils
-
-import (
- "context"
- "fmt"
-
- sqlserverflex "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-func ConfigureClient(
- ctx context.Context,
- providerData *core.ProviderData,
- diags *diag.Diagnostics,
-) *sqlserverflex.APIClient {
- apiClientConfigOptions := []config.ConfigurationOption{
- config.WithCustomAuth(providerData.RoundTripper),
- utils.UserAgentConfigOption(providerData.Version),
- }
- if providerData.SQLServerFlexCustomEndpoint != "" {
- apiClientConfigOptions = append(
- apiClientConfigOptions,
- config.WithEndpoint(providerData.SQLServerFlexCustomEndpoint),
- )
- } else {
- apiClientConfigOptions = append(apiClientConfigOptions, config.WithRegion(providerData.GetRegion()))
- }
- apiClient, err := sqlserverflex.NewAPIClient(apiClientConfigOptions...)
- if err != nil {
- core.LogAndAddError(
- ctx,
- diags,
- "Error configuring API client",
- fmt.Sprintf(
- "Configuring client: %v. This is an error related to the provider configuration, not to the resource configuration",
- err,
- ),
- )
- return nil
- }
-
- return apiClient
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go b/stackit/internal/services/sqlserverflexbeta/utils/util_test.go
deleted file mode 100644
index 92c6ffaa..00000000
--- a/stackit/internal/services/sqlserverflexbeta/utils/util_test.go
+++ /dev/null
@@ -1,98 +0,0 @@
-package utils
-
-import (
- "context"
- "os"
- "reflect"
- "testing"
-
- "github.com/hashicorp/terraform-plugin-framework/diag"
- sdkClients "github.com/stackitcloud/stackit-sdk-go/core/clients"
- "github.com/stackitcloud/stackit-sdk-go/core/config"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
-)
-
-const (
- testVersion = "1.2.3"
- testCustomEndpoint = "https://sqlserverflex-custom-endpoint.api.stackit.cloud"
-)
-
-func TestConfigureClient(t *testing.T) {
- /* mock authentication by setting service account token env variable */
- os.Clearenv()
- err := os.Setenv(sdkClients.ServiceAccountToken, "mock-val")
- if err != nil {
- t.Errorf("error setting env variable: %v", err)
- }
-
- type args struct {
- providerData *core.ProviderData
- }
- tests := []struct {
- name string
- args args
- wantErr bool
- expected *v3beta1api.APIClient
- }{
- {
- name: "default endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- },
- },
- expected: func() *v3beta1api.APIClient {
- apiClient, err := v3beta1api.NewAPIClient(
- config.WithRegion("eu01"),
- utils.UserAgentConfigOption(testVersion),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- {
- name: "custom endpoint",
- args: args{
- providerData: &core.ProviderData{
- Version: testVersion,
- SQLServerFlexCustomEndpoint: testCustomEndpoint,
- },
- },
- expected: func() *v3beta1api.APIClient {
- apiClient, err := v3beta1api.NewAPIClient(
- utils.UserAgentConfigOption(testVersion),
- config.WithEndpoint(testCustomEndpoint),
- )
- if err != nil {
- t.Errorf("error configuring client: %v", err)
- }
- return apiClient
- }(),
- wantErr: false,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.name, func(t *testing.T) {
- ctx := context.Background()
- diags := diag.Diagnostics{}
-
- actual := ConfigureClient(ctx, tt.args.providerData, &diags)
- if diags.HasError() != tt.wantErr {
- t.Errorf("ConfigureClient() error = %v, want %v", diags.HasError(), tt.wantErr)
- }
-
- if !reflect.DeepEqual(actual.GetConfig(), tt.expected.GetConfig()) {
- t.Errorf("ConfigureClient() = %v, want %v", actual, tt.expected)
- }
- },
- )
- }
-}
diff --git a/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go b/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go
deleted file mode 100644
index 239b44d3..00000000
--- a/stackit/internal/services/sqlserverflexbeta/versions/datasources_gen/version_data_source_gen.go
+++ /dev/null
@@ -1,569 +0,0 @@
-// Code generated by terraform-plugin-framework-generator DO NOT EDIT.
-
-package sqlserverflexbeta
-
-import (
- "context"
- "fmt"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/attr"
- "github.com/hashicorp/terraform-plugin-framework/diag"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
- "github.com/hashicorp/terraform-plugin-go/tftypes"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-)
-
-func VersionDataSourceSchema(ctx context.Context) schema.Schema {
- return schema.Schema{
- Attributes: map[string]schema.Attribute{
- "project_id": schema.StringAttribute{
- Required: true,
- Description: "The STACKIT project ID.",
- MarkdownDescription: "The STACKIT project ID.",
- },
- "region": schema.StringAttribute{
- Required: true,
- Description: "The region which should be addressed",
- MarkdownDescription: "The region which should be addressed",
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu01",
- ),
- },
- },
- "versions": schema.ListNestedAttribute{
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "beta": schema.BoolAttribute{
- Computed: true,
- Description: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
- MarkdownDescription: "Flag if the version is a beta version. If set the version may contain bugs and is not fully tested.",
- },
- "deprecated": schema.StringAttribute{
- Computed: true,
- Description: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
- MarkdownDescription: "Timestamp in RFC3339 format which says when the version will no longer be supported by STACKIT.",
- },
- "recommend": schema.BoolAttribute{
- Computed: true,
- Description: "Flag if the version is recommend by the STACKIT Team.",
- MarkdownDescription: "Flag if the version is recommend by the STACKIT Team.",
- },
- "version": schema.StringAttribute{
- Computed: true,
- Description: "The sqlserver version used for the instance.",
- MarkdownDescription: "The sqlserver version used for the instance.",
- },
- },
- CustomType: VersionsType{
- ObjectType: types.ObjectType{
- AttrTypes: VersionsValue{}.AttributeTypes(ctx),
- },
- },
- },
- Computed: true,
- Description: "A list containing available sqlserver versions.",
- MarkdownDescription: "A list containing available sqlserver versions.",
- },
- },
- }
-}
-
-type VersionModel struct {
- ProjectId types.String `tfsdk:"project_id"`
- Region types.String `tfsdk:"region"`
- Versions types.List `tfsdk:"versions"`
-}
-
-var _ basetypes.ObjectTypable = VersionsType{}
-
-type VersionsType struct {
- basetypes.ObjectType
-}
-
-func (t VersionsType) Equal(o attr.Type) bool {
- other, ok := o.(VersionsType)
-
- if !ok {
- return false
- }
-
- return t.ObjectType.Equal(other.ObjectType)
-}
-
-func (t VersionsType) String() string {
- return "VersionsType"
-}
-
-func (t VersionsType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributes := in.Attributes()
-
- betaAttribute, ok := attributes["beta"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `beta is missing from object`)
-
- return nil, diags
- }
-
- betaVal, ok := betaAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
- }
-
- deprecatedAttribute, ok := attributes["deprecated"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `deprecated is missing from object`)
-
- return nil, diags
- }
-
- deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
- }
-
- recommendAttribute, ok := attributes["recommend"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `recommend is missing from object`)
-
- return nil, diags
- }
-
- recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
- }
-
- versionAttribute, ok := attributes["version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `version is missing from object`)
-
- return nil, diags
- }
-
- versionVal, ok := versionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
- }
-
- if diags.HasError() {
- return nil, diags
- }
-
- return VersionsValue{
- Beta: betaVal,
- Deprecated: deprecatedVal,
- Recommend: recommendVal,
- Version: versionVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewVersionsValueNull() VersionsValue {
- return VersionsValue{
- state: attr.ValueStateNull,
- }
-}
-
-func NewVersionsValueUnknown() VersionsValue {
- return VersionsValue{
- state: attr.ValueStateUnknown,
- }
-}
-
-func NewVersionsValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (VersionsValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- // Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
- ctx := context.Background()
-
- for name, attributeType := range attributeTypes {
- attribute, ok := attributes[name]
-
- if !ok {
- diags.AddError(
- "Missing VersionsValue Attribute Value",
- "While creating a VersionsValue value, a missing attribute value was detected. "+
- "A VersionsValue must contain values for all attributes, even if null or unknown. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
- )
-
- continue
- }
-
- if !attributeType.Equal(attribute.Type(ctx)) {
- diags.AddError(
- "Invalid VersionsValue Attribute Type",
- "While creating a VersionsValue value, an invalid attribute value was detected. "+
- "A VersionsValue must use a matching attribute type for the value. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
- fmt.Sprintf("VersionsValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
- )
- }
- }
-
- for name := range attributes {
- _, ok := attributeTypes[name]
-
- if !ok {
- diags.AddError(
- "Extra VersionsValue Attribute Value",
- "While creating a VersionsValue value, an extra attribute value was detected. "+
- "A VersionsValue must not contain values beyond the expected attribute types. "+
- "This is always an issue with the provider and should be reported to the provider developers.\n\n"+
- fmt.Sprintf("Extra VersionsValue Attribute Name: %s", name),
- )
- }
- }
-
- if diags.HasError() {
- return NewVersionsValueUnknown(), diags
- }
-
- betaAttribute, ok := attributes["beta"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `beta is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- betaVal, ok := betaAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`beta expected to be basetypes.BoolValue, was: %T`, betaAttribute))
- }
-
- deprecatedAttribute, ok := attributes["deprecated"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `deprecated is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- deprecatedVal, ok := deprecatedAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`deprecated expected to be basetypes.StringValue, was: %T`, deprecatedAttribute))
- }
-
- recommendAttribute, ok := attributes["recommend"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `recommend is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- recommendVal, ok := recommendAttribute.(basetypes.BoolValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`recommend expected to be basetypes.BoolValue, was: %T`, recommendAttribute))
- }
-
- versionAttribute, ok := attributes["version"]
-
- if !ok {
- diags.AddError(
- "Attribute Missing",
- `version is missing from object`)
-
- return NewVersionsValueUnknown(), diags
- }
-
- versionVal, ok := versionAttribute.(basetypes.StringValue)
-
- if !ok {
- diags.AddError(
- "Attribute Wrong Type",
- fmt.Sprintf(`version expected to be basetypes.StringValue, was: %T`, versionAttribute))
- }
-
- if diags.HasError() {
- return NewVersionsValueUnknown(), diags
- }
-
- return VersionsValue{
- Beta: betaVal,
- Deprecated: deprecatedVal,
- Recommend: recommendVal,
- Version: versionVal,
- state: attr.ValueStateKnown,
- }, diags
-}
-
-func NewVersionsValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) VersionsValue {
- object, diags := NewVersionsValue(attributeTypes, attributes)
-
- if diags.HasError() {
- // This could potentially be added to the diag package.
- diagsStrings := make([]string, 0, len(diags))
-
- for _, diagnostic := range diags {
- diagsStrings = append(diagsStrings, fmt.Sprintf(
- "%s | %s | %s",
- diagnostic.Severity(),
- diagnostic.Summary(),
- diagnostic.Detail()))
- }
-
- panic("NewVersionsValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
- }
-
- return object
-}
-
-func (t VersionsType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
- if in.Type() == nil {
- return NewVersionsValueNull(), nil
- }
-
- if !in.Type().Equal(t.TerraformType(ctx)) {
- return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
- }
-
- if !in.IsKnown() {
- return NewVersionsValueUnknown(), nil
- }
-
- if in.IsNull() {
- return NewVersionsValueNull(), nil
- }
-
- attributes := map[string]attr.Value{}
-
- val := map[string]tftypes.Value{}
-
- err := in.As(&val)
-
- if err != nil {
- return nil, err
- }
-
- for k, v := range val {
- a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
-
- if err != nil {
- return nil, err
- }
-
- attributes[k] = a
- }
-
- return NewVersionsValueMust(VersionsValue{}.AttributeTypes(ctx), attributes), nil
-}
-
-func (t VersionsType) ValueType(ctx context.Context) attr.Value {
- return VersionsValue{}
-}
-
-var _ basetypes.ObjectValuable = VersionsValue{}
-
-type VersionsValue struct {
- Beta basetypes.BoolValue `tfsdk:"beta"`
- Deprecated basetypes.StringValue `tfsdk:"deprecated"`
- Recommend basetypes.BoolValue `tfsdk:"recommend"`
- Version basetypes.StringValue `tfsdk:"version"`
- state attr.ValueState
-}
-
-func (v VersionsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
- attrTypes := make(map[string]tftypes.Type, 4)
-
- var val tftypes.Value
- var err error
-
- attrTypes["beta"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["deprecated"] = basetypes.StringType{}.TerraformType(ctx)
- attrTypes["recommend"] = basetypes.BoolType{}.TerraformType(ctx)
- attrTypes["version"] = basetypes.StringType{}.TerraformType(ctx)
-
- objectType := tftypes.Object{AttributeTypes: attrTypes}
-
- switch v.state {
- case attr.ValueStateKnown:
- vals := make(map[string]tftypes.Value, 4)
-
- val, err = v.Beta.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["beta"] = val
-
- val, err = v.Deprecated.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["deprecated"] = val
-
- val, err = v.Recommend.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["recommend"] = val
-
- val, err = v.Version.ToTerraformValue(ctx)
-
- if err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- vals["version"] = val
-
- if err := tftypes.ValidateValue(objectType, vals); err != nil {
- return tftypes.NewValue(objectType, tftypes.UnknownValue), err
- }
-
- return tftypes.NewValue(objectType, vals), nil
- case attr.ValueStateNull:
- return tftypes.NewValue(objectType, nil), nil
- case attr.ValueStateUnknown:
- return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
- default:
- panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
- }
-}
-
-func (v VersionsValue) IsNull() bool {
- return v.state == attr.ValueStateNull
-}
-
-func (v VersionsValue) IsUnknown() bool {
- return v.state == attr.ValueStateUnknown
-}
-
-func (v VersionsValue) String() string {
- return "VersionsValue"
-}
-
-func (v VersionsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
- var diags diag.Diagnostics
-
- attributeTypes := map[string]attr.Type{
- "beta": basetypes.BoolType{},
- "deprecated": basetypes.StringType{},
- "recommend": basetypes.BoolType{},
- "version": basetypes.StringType{},
- }
-
- if v.IsNull() {
- return types.ObjectNull(attributeTypes), diags
- }
-
- if v.IsUnknown() {
- return types.ObjectUnknown(attributeTypes), diags
- }
-
- objVal, diags := types.ObjectValue(
- attributeTypes,
- map[string]attr.Value{
- "beta": v.Beta,
- "deprecated": v.Deprecated,
- "recommend": v.Recommend,
- "version": v.Version,
- })
-
- return objVal, diags
-}
-
-func (v VersionsValue) Equal(o attr.Value) bool {
- other, ok := o.(VersionsValue)
-
- if !ok {
- return false
- }
-
- if v.state != other.state {
- return false
- }
-
- if v.state != attr.ValueStateKnown {
- return true
- }
-
- if !v.Beta.Equal(other.Beta) {
- return false
- }
-
- if !v.Deprecated.Equal(other.Deprecated) {
- return false
- }
-
- if !v.Recommend.Equal(other.Recommend) {
- return false
- }
-
- if !v.Version.Equal(other.Version) {
- return false
- }
-
- return true
-}
-
-func (v VersionsValue) Type(ctx context.Context) attr.Type {
- return VersionsType{
- basetypes.ObjectType{
- AttrTypes: v.AttributeTypes(ctx),
- },
- }
-}
-
-func (v VersionsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
- return map[string]attr.Type{
- "beta": basetypes.BoolType{},
- "deprecated": basetypes.StringType{},
- "recommend": basetypes.BoolType{},
- "version": basetypes.StringType{},
- }
-}
diff --git a/internal/testutils/helpers.go b/stackit/internal/testutil/testutil.go
similarity index 52%
rename from internal/testutils/helpers.go
rename to stackit/internal/testutil/testutil.go
index 4b460fba..e2ab0c59 100644
--- a/internal/testutils/helpers.go
+++ b/stackit/internal/testutil/testutil.go
@@ -1,11 +1,71 @@
-package testutils
+// Copyright (c) STACKIT
+
+package testutil
import (
+ "encoding/json"
"fmt"
"os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-framework/providerserver"
+ "github.com/hashicorp/terraform-plugin-go/tfprotov6"
+ "github.com/hashicorp/terraform-plugin-testing/config"
+ "github.com/hashicorp/terraform-plugin-testing/echoprovider"
+
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
+)
+
+const (
+ // Default location of credentials JSON
+ credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive
)
var (
+ // TestAccProtoV6ProviderFactories is used to instantiate a provider during
+ // acceptance testing. The factory function will be invoked for every Terraform
+ // CLI command executed to create a provider server to which the CLI can
+ // reattach.
+ TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
+ "stackit": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
+ }
+
+ // TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during
+ // acceptance testing. The factory function will be invoked for every Terraform
+ // CLI command executed to create a provider server to which the CLI can
+ // reattach.
+ //
+ // See the Terraform acceptance test documentation on ephemeral resources for more information:
+ // https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources
+ TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
+ "stackit": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
+ "echo": echoprovider.NewProviderServer(),
+ }
+
+ // E2ETestsEnabled checks if end-to-end tests should be run.
+ // It is enabled when the TF_ACC environment variable is set to "1".
+ E2ETestsEnabled = os.Getenv("TF_ACC") == "1"
+ // OrganizationId is the id of organization used for tests
+ OrganizationId = os.Getenv("TF_ACC_ORGANIZATION_ID")
+ // ProjectId is the id of project used for tests
+ ProjectId = os.Getenv("TF_ACC_PROJECT_ID")
+ Region = os.Getenv("TF_ACC_REGION")
+ // ServerId is the id of a server used for some tests
+ ServerId = getenv("TF_ACC_SERVER_ID", "")
+ // TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests
+ TestProjectParentContainerID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID")
+ // TestProjectParentUUID is the uuid of the parent resource under which projects are created as part of the resource-manager acceptance tests
+ TestProjectParentUUID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_UUID")
+ // TestProjectServiceAccountEmail is the e-mail of a service account with admin permissions on the organization under which projects are created as part of the resource-manager acceptance tests
+ TestProjectServiceAccountEmail = os.Getenv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL")
+ // TestProjectUserEmail is the e-mail of a user for the project created as part of the resource-manager acceptance tests
+ // Default email: acc-test@sa.stackit.cloud
+ TestProjectUserEmail = getenv("TF_ACC_TEST_PROJECT_USER_EMAIL", "acc-test@sa.stackit.cloud")
+ // TestImageLocalFilePath is the local path to an image file used for image acceptance tests
+ TestImageLocalFilePath = getenv("TF_ACC_TEST_IMAGE_LOCAL_FILE_PATH", "default")
+
CdnCustomEndpoint = os.Getenv("TF_ACC_CDN_CUSTOM_ENDPOINT")
DnsCustomEndpoint = os.Getenv("TF_ACC_DNS_CUSTOM_ENDPOINT")
GitCustomEndpoint = os.Getenv("TF_ACC_GIT_CUSTOM_ENDPOINT")
@@ -33,29 +93,30 @@ var (
SKECustomEndpoint = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT")
)
+// Provider config helper functions
+
func ObservabilityProviderConfig() string {
if ObservabilityCustomEndpoint == "" {
- return `provider "stackitprivatepreview" {
+ return `provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
observability_custom_endpoint = "%s"
}`,
ObservabilityCustomEndpoint,
)
}
-
func CdnProviderConfig() string {
if CdnCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
enable_beta_resources = true
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
cdn_custom_endpoint = "%s"
enable_beta_resources = true
}`,
@@ -65,10 +126,10 @@ func CdnProviderConfig() string {
func DnsProviderConfig() string {
if DnsCustomEndpoint == "" {
- return `provider "stackitprivatepreview" {}`
+ return `provider "stackit" {}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
dns_custom_endpoint = "%s"
}`,
DnsCustomEndpoint,
@@ -78,12 +139,12 @@ func DnsProviderConfig() string {
func IaaSProviderConfig() string {
if IaaSCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
iaas_custom_endpoint = "%s"
}`,
IaaSCustomEndpoint,
@@ -93,13 +154,13 @@ func IaaSProviderConfig() string {
func IaaSProviderConfigWithBetaResourcesEnabled() string {
if IaaSCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
enable_beta_resources = true
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
enable_beta_resources = true
iaas_custom_endpoint = "%s"
}`,
@@ -110,13 +171,13 @@ func IaaSProviderConfigWithBetaResourcesEnabled() string {
func IaaSProviderConfigWithExperiments() string {
if IaaSCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
experiments = [ "routing-tables", "network" ]
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
iaas_custom_endpoint = "%s"
experiments = [ "routing-tables", "network" ]
}`,
@@ -127,12 +188,12 @@ func IaaSProviderConfigWithExperiments() string {
func KMSProviderConfig() string {
if KMSCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
kms_custom_endpoint = "%s"
}`,
KMSCustomEndpoint,
@@ -142,12 +203,12 @@ func KMSProviderConfig() string {
func LoadBalancerProviderConfig() string {
if LoadBalancerCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
loadbalancer_custom_endpoint = "%s"
}`,
LoadBalancerCustomEndpoint,
@@ -157,12 +218,12 @@ func LoadBalancerProviderConfig() string {
func LogMeProviderConfig() string {
if LogMeCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
logme_custom_endpoint = "%s"
}`,
LogMeCustomEndpoint,
@@ -172,12 +233,12 @@ func LogMeProviderConfig() string {
func MariaDBProviderConfig() string {
if MariaDBCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
mariadb_custom_endpoint = "%s"
}`,
MariaDBCustomEndpoint,
@@ -187,13 +248,13 @@ func MariaDBProviderConfig() string {
func ModelServingProviderConfig() string {
if ModelServingCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}
`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
modelserving_custom_endpoint = "%s"
}`,
ModelServingCustomEndpoint,
@@ -203,12 +264,12 @@ func ModelServingProviderConfig() string {
func MongoDBFlexProviderConfig() string {
if MongoDBFlexCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
mongodbflex_custom_endpoint = "%s"
}`,
MongoDBFlexCustomEndpoint,
@@ -218,12 +279,12 @@ func MongoDBFlexProviderConfig() string {
func ObjectStorageProviderConfig() string {
if ObjectStorageCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
objectstorage_custom_endpoint = "%s"
}`,
ObjectStorageCustomEndpoint,
@@ -233,32 +294,29 @@ func ObjectStorageProviderConfig() string {
func OpenSearchProviderConfig() string {
if OpenSearchCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
opensearch_custom_endpoint = "%s"
}`,
OpenSearchCustomEndpoint,
)
}
-func PostgresFlexProviderConfig(saFile string) string {
+func PostgresFlexProviderConfig() string {
if PostgresFlexCustomEndpoint == "" {
- return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ return `
+ provider "stackit" {
default_region = "eu01"
- service_account_key_path = "%s"
- }`, saFile)
+ }`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
- service_account_key_path = "%s"
+ provider "stackit" {
postgresflex_custom_endpoint = "%s"
}`,
- saFile,
PostgresFlexCustomEndpoint,
)
}
@@ -266,12 +324,12 @@ func PostgresFlexProviderConfig(saFile string) string {
func RabbitMQProviderConfig() string {
if RabbitMQCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
rabbitmq_custom_endpoint = "%s"
}`,
RabbitMQCustomEndpoint,
@@ -281,68 +339,66 @@ func RabbitMQProviderConfig() string {
func RedisProviderConfig() string {
if RedisCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
redis_custom_endpoint = "%s"
}`,
RedisCustomEndpoint,
)
}
-func ResourceManagerProviderConfig(saKeyPath string) string {
+func ResourceManagerProviderConfig() string {
+ token := GetTestProjectServiceAccountToken("")
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
- service_account_key_path = "%s"
+ provider "stackit" {
+ service_account_token = "%s"
}`,
- saKeyPath,
+ token,
)
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
resourcemanager_custom_endpoint = "%s"
authorization_custom_endpoint = "%s"
- service_account_key_path = "%s"
+ service_account_token = "%s"
}`,
ResourceManagerCustomEndpoint,
AuthorizationCustomEndpoint,
- saKeyPath,
+ token,
)
}
func SecretsManagerProviderConfig() string {
if SecretsManagerCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
secretsmanager_custom_endpoint = "%s"
}`,
SecretsManagerCustomEndpoint,
)
}
-func SQLServerFlexProviderConfig(saFile string) string {
+func SQLServerFlexProviderConfig() string {
if SQLServerFlexCustomEndpoint == "" {
- return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ return `
+ provider "stackit" {
default_region = "eu01"
- service_account_key_path = "%s"
- }`, saFile)
+ }`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
- service_account_key_path = "%s"
+ provider "stackit" {
sqlserverflex_custom_endpoint = "%s"
}`,
- saFile,
SQLServerFlexCustomEndpoint,
)
}
@@ -350,13 +406,13 @@ func SQLServerFlexProviderConfig(saFile string) string {
func ServerBackupProviderConfig() string {
if ServerBackupCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
enable_beta_resources = true
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
server_backup_custom_endpoint = "%s"
enable_beta_resources = true
}`,
@@ -367,13 +423,13 @@ func ServerBackupProviderConfig() string {
func ServerUpdateProviderConfig() string {
if ServerUpdateCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
enable_beta_resources = true
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
server_update_custom_endpoint = "%s"
enable_beta_resources = true
}`,
@@ -384,12 +440,12 @@ func ServerUpdateProviderConfig() string {
func SKEProviderConfig() string {
if SKECustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
ske_custom_endpoint = "%s"
}`,
SKECustomEndpoint,
@@ -399,13 +455,13 @@ func SKEProviderConfig() string {
func AuthorizationProviderConfig() string {
if AuthorizationCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
experiments = ["iam"]
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
authorization_custom_endpoint = "%s"
experiments = ["iam"]
}`,
@@ -416,13 +472,13 @@ func AuthorizationProviderConfig() string {
func ServiceAccountProviderConfig() string {
if ServiceAccountCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
enable_beta_resources = true
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
service_account_custom_endpoint = "%s"
enable_beta_resources = true
}`,
@@ -433,13 +489,13 @@ func ServiceAccountProviderConfig() string {
func GitProviderConfig() string {
if GitCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
enable_beta_resources = true
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
git_custom_endpoint = "%s"
enable_beta_resources = true
}`,
@@ -450,15 +506,105 @@ func GitProviderConfig() string {
func ScfProviderConfig() string {
if ScfCustomEndpoint == "" {
return `
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
}`
}
return fmt.Sprintf(`
- provider "stackitprivatepreview" {
+ provider "stackit" {
default_region = "eu01"
scf_custom_endpoint = "%s"
}`,
ScfCustomEndpoint,
)
}
+
+func ResourceNameWithDateTime(name string) string {
+ dateTime := time.Now().Format(time.RFC3339)
+ // Remove timezone to have a smaller datetime
+ dateTimeTrimmed, _, _ := strings.Cut(dateTime, "+")
+ return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
+}
+
+func GetTestProjectServiceAccountToken(path string) string {
+ var err error
+ token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
+ if !tokenSet || token == "" {
+ token, err = readTestTokenFromCredentialsFile(path)
+ if err != nil {
+ return ""
+ }
+ }
+ return token
+}
+
+func readTestTokenFromCredentialsFile(path string) (string, error) {
+ if path == "" {
+ customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH")
+ if !customPathSet || customPath == "" {
+ path = credentialsFilePath
+ home, err := os.UserHomeDir()
+ if err != nil {
+ return "", fmt.Errorf("getting home directory: %w", err)
+ }
+ path = filepath.Join(home, path)
+ } else {
+ path = customPath
+ }
+ }
+
+ credentialsRaw, err := os.ReadFile(path)
+ if err != nil {
+ return "", fmt.Errorf("opening file: %w", err)
+ }
+
+ var credentials struct {
+ TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"`
+ }
+ err = json.Unmarshal(credentialsRaw, &credentials)
+ if err != nil {
+ return "", fmt.Errorf("unmarshalling credentials: %w", err)
+ }
+ return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
+}
+
+func getenv(key, defaultValue string) string {
+ val := os.Getenv(key)
+ if val == "" {
+ return defaultValue
+ }
+ return val
+}
+
+// CreateDefaultLocalFile is a helper for local_file_path. No real data is created
+func CreateDefaultLocalFile() os.File {
+ // Define the file name and size
+ fileName := "test-512k.img"
+ size := 512 * 1024 // 512 KB
+
+ // Create the file
+ file, err := os.Create(fileName)
+ if err != nil {
+ panic(err)
+ }
+
+ // Seek to the desired position (512 KB)
+ _, err = file.Seek(int64(size), 0)
+ if err != nil {
+ panic(err)
+ }
+
+ return *file
+}
+
+func ConvertConfigVariable(variable config.Variable) string {
+ tmpByteArray, _ := variable.MarshalJSON()
+ // In case the variable is a string, the quotes should be removed
+ if tmpByteArray[0] == '"' && tmpByteArray[len(tmpByteArray)-1] == '"' {
+ result := string(tmpByteArray[1 : len(tmpByteArray)-1])
+ // Replace escaped quotes which where added MarshalJSON
+ rawString := strings.ReplaceAll(result, `\"`, `"`)
+ return rawString
+ }
+ return string(tmpByteArray)
+}
diff --git a/internal/testutils/testutils_test.go b/stackit/internal/testutil/testutil_test.go
similarity index 95%
rename from internal/testutils/testutils_test.go
rename to stackit/internal/testutil/testutil_test.go
index 4e18bd1e..f74ca81c 100644
--- a/internal/testutils/testutils_test.go
+++ b/stackit/internal/testutil/testutil_test.go
@@ -1,4 +1,6 @@
-package testutils
+// Copyright (c) STACKIT
+
+package testutil
import (
"testing"
diff --git a/stackit/internal/utils/attributes.go b/stackit/internal/utils/attributes.go
index 26d228c3..6e3ec386 100644
--- a/stackit/internal/utils/attributes.go
+++ b/stackit/internal/utils/attributes.go
@@ -10,7 +10,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/types"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/utils/planModifiers_test.go b/stackit/internal/utils/planModifiers_test.go
deleted file mode 100644
index 337ea36f..00000000
--- a/stackit/internal/utils/planModifiers_test.go
+++ /dev/null
@@ -1,224 +0,0 @@
-package utils
-
-import (
- "testing"
-
- "github.com/google/go-cmp/cmp"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
-)
-
-func TestReadModifiersConfig(t *testing.T) {
- testcases := []struct {
- name string
- content []byte
- wantErr bool
- }{
- {
- name: "valid yaml",
- content: []byte(`
-fields:
- - name: 'id'
- modifiers:
- - 'UseStateForUnknown'
-`),
- wantErr: false,
- },
- {
- name: "invalid yaml",
- content: []byte(`invalid: yaml: :`),
- wantErr: true,
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- _, err := ReadModifiersConfig(tc.content)
- if (err != nil) != tc.wantErr {
- t.Errorf("ReadModifiersConfig() error = %v, wantErr %v", err, tc.wantErr)
- }
- },
- )
- }
-}
-
-func TestAddPlanModifiersToResourceSchema(t *testing.T) {
- testcases := []struct {
- name string
- fields *Fields
- sch *schema.Schema
- wantErr bool
- }{
- {
- name: "full coverage - all types and nested structures",
- fields: &Fields{
- Fields: []*Field{
- {
- Name: "string_attr",
- Modifiers: []*string{utils.Ptr("RequiresReplace"), utils.Ptr("UseStateForUnknown")},
- },
- {Name: "bool_attr", Modifiers: []*string{utils.Ptr("RequiresReplace")}},
- {Name: "int_attr", Modifiers: []*string{utils.Ptr("UseStateForUnknown")}},
- {Name: "list_attr", Modifiers: []*string{utils.Ptr("RequiresReplace")}},
- {Name: "Nested.sub_string", Modifiers: []*string{utils.Ptr("RequiresReplace")}},
- },
- },
- sch: &schema.Schema{
- Attributes: map[string]schema.Attribute{
- "StringAttr": schema.StringAttribute{},
- "BoolAttr": schema.BoolAttribute{},
- "IntAttr": schema.Int64Attribute{},
- "ListAttr": schema.ListAttribute{},
- "Nested": schema.SingleNestedAttribute{
- Attributes: map[string]schema.Attribute{
- "SubString": schema.StringAttribute{},
- },
- },
- "Unsupported": schema.MapAttribute{ElementType: types.StringType}, // Triggers default/warn case
- },
- },
- wantErr: false,
- },
- {
- name: "validation error - invalid modifier",
- fields: &Fields{
- Fields: []*Field{
- {Name: "id", Modifiers: []*string{utils.Ptr("InvalidModifier")}},
- },
- },
- sch: &schema.Schema{
- Attributes: map[string]schema.Attribute{"id": schema.StringAttribute{}},
- },
- wantErr: true,
- },
- {
- name: "validation error - empty modifier",
- fields: &Fields{
- Fields: []*Field{
- {Name: "id", Modifiers: []*string{utils.Ptr("")}},
- },
- },
- sch: &schema.Schema{},
- wantErr: true,
- },
- {
- name: "nil fields - should return nil",
- fields: nil,
- sch: &schema.Schema{},
- wantErr: false,
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- err := AddPlanModifiersToResourceSchema(tc.fields, tc.sch)
-
- if (err != nil) != tc.wantErr {
- t.Fatalf("AddPlanModifiersToResourceSchema() error = %v, wantErr %v", err, tc.wantErr)
- }
-
- if !tc.wantErr && tc.name == "full coverage - all types and nested structures" {
- // Check StringAttr
- if sAttr, ok := tc.sch.Attributes["StringAttr"].(schema.StringAttribute); ok {
- if len(sAttr.PlanModifiers) != 2 {
- t.Errorf("StringAttr: expected 2 modifiers, got %d", len(sAttr.PlanModifiers))
- }
- }
-
- // Check Nested Sub-Attribute
- if nested, ok := tc.sch.Attributes["Nested"].(schema.SingleNestedAttribute); ok {
- if subAttr, ok := nested.Attributes["SubString"].(schema.StringAttribute); ok {
- if len(subAttr.PlanModifiers) != 1 {
- // Dies schlug vorher fehl, weil der Prefix "Nested" statt "nested" war
- t.Errorf("Nested SubString: expected 1 modifier, got %d", len(subAttr.PlanModifiers))
- }
- } else {
- t.Error("SubString attribute not found in Nested")
- }
- } else {
- t.Error("Nested attribute not found")
- }
- }
- },
- )
- }
-}
-
-func TestFieldListToMap(t *testing.T) {
- testcases := []struct {
- name string
- fields *Fields
- want map[string][]*string
- }{
- {
- name: "convert list to map",
- fields: &Fields{
- Fields: []*Field{
- {Name: "test", Modifiers: []*string{utils.Ptr("mod")}},
- },
- },
- want: map[string][]*string{
- "test": {utils.Ptr("mod")},
- },
- },
- {
- name: "nil fields",
- fields: nil,
- want: map[string][]*string{},
- },
- }
-
- for _, tc := range testcases {
- t.Run(
- tc.name, func(t *testing.T) {
- got := fieldListToMap(tc.fields)
- if diff := cmp.Diff(tc.want, got); diff != "" {
- t.Errorf("fieldListToMap() mismatch (-want +got):\n%s", diff)
- }
- },
- )
- }
-}
-
-func TestHandleTypeMismatches(t *testing.T) {
- modifiers := []*string{utils.Ptr("RequiresReplace")}
-
- t.Run(
- "bool type mismatch", func(t *testing.T) {
- _, err := handleBoolPlanModifiers(schema.StringAttribute{}, modifiers)
- if err == nil {
- t.Error("expected error for type mismatch in handleBoolPlanModifiers")
- }
- },
- )
-
- t.Run(
- "string type mismatch", func(t *testing.T) {
- _, err := handleStringPlanModifiers(schema.BoolAttribute{}, modifiers)
- if err == nil {
- t.Error("expected error for type mismatch in handleStringPlanModifiers")
- }
- },
- )
-
- t.Run(
- "int64 type mismatch", func(t *testing.T) {
- _, err := handleInt64PlanModifiers(schema.StringAttribute{}, modifiers)
- if err == nil {
- t.Error("expected error for type mismatch in handleInt64PlanModifiers")
- }
- },
- )
-
- t.Run(
- "list type mismatch", func(t *testing.T) {
- _, err := handleListPlanModifiers(schema.StringAttribute{}, modifiers)
- if err == nil {
- t.Error("expected error for type mismatch in handleListPlanModifiers")
- }
- },
- )
-}
diff --git a/stackit/internal/utils/regions.go b/stackit/internal/utils/regions.go
index 70f79620..5c06ca1b 100644
--- a/stackit/internal/utils/regions.go
+++ b/stackit/internal/utils/regions.go
@@ -8,7 +8,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/types"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/utils/strings.go b/stackit/internal/utils/strings.go
deleted file mode 100644
index 745139f8..00000000
--- a/stackit/internal/utils/strings.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package utils
-
-func RemoveQuotes(src string) string {
- var res string
- if src != "" && src[0] == '"' {
- res = src[1:]
- }
- if res != "" && res[len(res)-1] == '"' {
- res = res[:len(res)-1]
- }
- return res
-}
diff --git a/stackit/internal/utils/utils.go b/stackit/internal/utils/utils.go
index 8ca4984d..fbf5cb6e 100644
--- a/stackit/internal/utils/utils.go
+++ b/stackit/internal/utils/utils.go
@@ -20,7 +20,6 @@ import (
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
)
diff --git a/stackit/internal/validate/validate.go b/stackit/internal/validate/validate.go
index d118ac52..07d137ae 100644
--- a/stackit/internal/validate/validate.go
+++ b/stackit/internal/validate/validate.go
@@ -18,7 +18,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"github.com/teambition/rrule-go"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
)
diff --git a/stackit/internal/wait/postgresflexalpha/wait.go b/stackit/internal/wait/postgresflexalpha/wait.go
index 26f2d729..5177e6f1 100644
--- a/stackit/internal/wait/postgresflexalpha/wait.go
+++ b/stackit/internal/wait/postgresflexalpha/wait.go
@@ -2,14 +2,11 @@ package postgresflexalpha
import (
"context"
- "errors"
"fmt"
- "math"
- "net/http"
"time"
"github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/wait"
@@ -24,60 +21,58 @@ const (
InstanceStateTerminating = "TERMINATING"
InstanceStateUnknown = "UNKNOWN"
InstanceStatePending = "PENDING"
- InstanceStateDeleted = "DELETED"
)
// APIClientInstanceInterface Interface needed for tests
type APIClientInstanceInterface interface {
- GetInstanceRequest(ctx context.Context, projectId, region, instanceId string) v3alpha1api.ApiGetInstanceRequestRequest
+ GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (
+ *postgresflex.GetInstanceResponse,
+ error,
+ )
- ListUsersRequest(
+ ListUsersRequestExecute(
ctx context.Context,
projectId string,
region string,
instanceId string,
- ) v3alpha1api.ApiListUsersRequestRequest
+ ) (*postgresflex.ListUserResponse, error)
}
// APIClientUserInterface Interface needed for tests
type APIClientUserInterface interface {
- GetUserRequest(ctx context.Context, projectId, region, instanceId string, userId int32) v3alpha1api.ApiGetUserRequestRequest
-}
-
-// APIClientDatabaseInterface Interface needed for tests
-type APIClientDatabaseInterface interface {
- GetDatabaseRequest(ctx context.Context, projectId string, region string, instanceId string, databaseId int32) v3alpha1api.ApiGetDatabaseRequestRequest
+ GetUserRequestExecute(ctx context.Context, projectId, region, instanceId string, userId int64) (
+ *postgresflex.GetUserResponse,
+ error,
+ )
}
// CreateInstanceWaitHandler will wait for instance creation
func CreateInstanceWaitHandler(
ctx context.Context, a APIClientInstanceInterface, projectId, region,
instanceId string,
-) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
+) *wait.AsyncActionHandler[postgresflex.GetInstanceResponse] {
instanceCreated := false
- var instanceGetResponse *v3alpha1api.GetInstanceResponse
+ var instanceGetResponse *postgresflex.GetInstanceResponse
maxWait := time.Minute * 45
startTime := time.Now()
extendedTimeout := 0
handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
+ func() (waitFinished bool, response *postgresflex.GetInstanceResponse, err error) {
if !instanceCreated {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
+ s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
if err != nil {
return false, nil, err
}
- if s == nil || s.Id != instanceId {
+ if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
return false, nil, nil
}
- tflog.Debug(
- ctx, "waiting for instance ready", map[string]interface{}{
- "status": s.Status,
- },
- )
- switch s.Status {
+ tflog.Debug(ctx, "waiting for instance ready", map[string]interface{}{
+ "status": *s.Status,
+ })
+ switch *s.Status {
default:
- return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, s.Status)
+ return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, *s.Status)
case InstanceStateEmpty:
return false, nil, nil
case InstanceStatePending:
@@ -98,11 +93,11 @@ func CreateInstanceWaitHandler(
),
)
if extendedTimeout < 3 {
- maxWait += time.Minute * 5
- extendedTimeout++
+ maxWait = maxWait + time.Minute*5
+ extendedTimeout = extendedTimeout + 1
if *s.Network.AccessScope == "SNA" {
ready := true
- if s.Network.InstanceAddress == nil {
+ if s.Network == nil || s.Network.InstanceAddress == nil {
tflog.Warn(ctx, "Waiting for instance_address")
ready = false
}
@@ -114,18 +109,22 @@ func CreateInstanceWaitHandler(
return false, nil, nil
}
}
+ if s.IsDeletable == nil {
+ tflog.Warn(ctx, "Waiting for is_deletable")
+ return false, nil, nil
+ }
}
instanceCreated = true
instanceGetResponse = s
case InstanceStateSuccess:
- if s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
- if s.Network.InstanceAddress == nil {
+ if *s.Network.AccessScope == "SNA" {
+ if s.Network == nil || s.Network.InstanceAddress == nil {
tflog.Warn(ctx, "Waiting for instance_address")
return false, nil, nil
}
if s.Network.RouterAddress == nil {
- tflog.Warn(ctx, "Waiting for router_address")
+ tflog.Info(ctx, "Waiting for router_address")
return false, nil, nil
}
}
@@ -142,7 +141,7 @@ func CreateInstanceWaitHandler(
tflog.Info(ctx, "Waiting for instance (calling list users")
// // User operations aren't available right after an instance is deemed successful
// // To check if they are, perform a users request
- _, err = a.ListUsersRequest(ctx, projectId, region, instanceId).Execute()
+ _, err = a.ListUsersRequestExecute(ctx, projectId, region, instanceId)
if err == nil {
return true, instanceGetResponse, nil
}
@@ -150,7 +149,6 @@ func CreateInstanceWaitHandler(
if !ok {
return false, nil, err
}
- // TODO: refactor and cooperate with api guys to mitigate
if oapiErr.StatusCode < 500 {
return true, instanceGetResponse, fmt.Errorf(
"users request after instance creation returned %d status code",
@@ -167,21 +165,21 @@ func CreateInstanceWaitHandler(
// PartialUpdateInstanceWaitHandler will wait for instance update
func PartialUpdateInstanceWaitHandler(
- ctx context.Context, a APIClientInstanceInterface, projectID, region,
- instanceID string,
-) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
+ ctx context.Context, a APIClientInstanceInterface, projectId, region,
+ instanceId string,
+) *wait.AsyncActionHandler[postgresflex.GetInstanceResponse] {
handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectID, region, instanceID).Execute()
+ func() (waitFinished bool, response *postgresflex.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
if err != nil {
return false, nil, err
}
- if s == nil || s.Id != instanceID {
+ if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
return false, nil, nil
}
- switch s.Status {
+ switch *s.Status {
default:
- return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceID, s.Status)
+ return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceId, *s.Status)
case InstanceStateEmpty:
return false, nil, nil
case InstanceStatePending:
@@ -195,137 +193,10 @@ func PartialUpdateInstanceWaitHandler(
case InstanceStateUnknown:
return false, nil, nil
case InstanceStateFailed:
- return true, s, fmt.Errorf("update got status FAILURE for instance with id %s", instanceID)
+ return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
}
},
)
handler.SetTimeout(45 * time.Minute).SetSleepBeforeWait(30 * time.Second)
return handler
}
-
-// GetUserByIdWaitHandler will wait for instance creation
-func GetUserByIdWaitHandler(
- ctx context.Context,
- a APIClientUserInterface,
- projectID, instanceID, region string,
- userID int64,
-) *wait.AsyncActionHandler[v3alpha1api.GetUserResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetUserResponse, err error) {
- if userID > math.MaxInt32 {
- return false, nil, fmt.Errorf("userID too large for int32")
- }
- userID32 := int32(userID) //nolint:gosec // checked above
- s, err := a.GetUserRequest(ctx, projectID, region, instanceID, userID32).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- switch oapiErr.StatusCode {
- case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
- case http.StatusNotFound:
- tflog.Warn(
- ctx, "api responded with status", map[string]interface{}{
- "status": oapiErr.StatusCode,
- },
- )
- return false, nil, nil
- default:
- return false, nil, err
- }
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// GetDatabaseByIdWaitHandler will wait for instance creation
-func GetDatabaseByIdWaitHandler(
- ctx context.Context,
- a APIClientDatabaseInterface,
- projectID, instanceID, region string,
- databaseID int64,
-) *wait.AsyncActionHandler[v3alpha1api.GetDatabaseResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetDatabaseResponse, err error) {
- if databaseID > math.MaxInt32 {
- return false, nil, fmt.Errorf("databaseID too large for int32")
- }
- dbId32 := int32(databaseID) //nolint:gosec // is checked above
- s, err := a.GetDatabaseRequest(ctx, projectID, region, instanceID, dbId32).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- switch oapiErr.StatusCode {
- case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
- tflog.Warn(
- ctx, "api responded with 50[2,3,4] status", map[string]interface{}{
- "status": oapiErr.StatusCode,
- },
- )
- return false, nil, nil
- case http.StatusNotFound:
- tflog.Warn(
- ctx, "api responded with 404 status", map[string]interface{}{
- "status": oapiErr.StatusCode,
- },
- )
- return false, nil, nil
- default:
- return false, nil, err
- }
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-func DeleteInstanceWaitHandler(
- ctx context.Context,
- a APIClientInstanceInterface,
- projectID,
- region,
- instanceID string,
- timeout, sleepBeforeWait time.Duration,
-) error {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectID, region, instanceID).Execute()
- if err != nil {
- oapiErr, ok := err.(*oapierror.GenericOpenAPIError) // nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
- if !ok {
- return false, nil, fmt.Errorf("received error is no oapierror: %w", err)
- }
- if oapiErr.StatusCode == 404 {
- return true, nil, nil
- }
- return false, nil, fmt.Errorf("api returned error: %w", err)
- }
- switch s.Status {
- case InstanceStateDeleted:
- return true, nil, nil
- case InstanceStateEmpty, InstanceStatePending, InstanceStateUnknown, InstanceStateProgressing, InstanceStateSuccess:
- return false, nil, nil
- case InstanceStateFailed:
- return true, nil, fmt.Errorf("wait handler got status FAILURE for instance: %s", instanceID)
- default:
- return true, s, fmt.Errorf("instance with id %s has unexpected status %s", instanceID, s.Status)
- }
- },
- ).
- SetTimeout(timeout).
- SetSleepBeforeWait(sleepBeforeWait)
-
- _, err := handler.WaitWithContext(ctx)
- if err != nil {
- return err
- }
- return nil
-}
diff --git a/stackit/internal/wait/postgresflexalpha/wait_test.go b/stackit/internal/wait/postgresflexalpha/wait_test.go
index faef6cbf..e9583d14 100644
--- a/stackit/internal/wait/postgresflexalpha/wait_test.go
+++ b/stackit/internal/wait/postgresflexalpha/wait_test.go
@@ -10,34 +10,86 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
+ postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
)
+// Used for testing instance operations
+type apiClientInstanceMocked struct {
+ instanceId string
+ instanceState string
+ instanceNetwork postgresflex.InstanceNetwork
+ instanceIsForceDeleted bool
+ instanceGetFails bool
+ usersGetErrorStatus int
+}
+
+func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
+ _ context.Context,
+ _, _, _ string,
+) (*postgresflex.GetInstanceResponse, error) {
+ if a.instanceGetFails {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: 500,
+ }
+ }
+
+ if a.instanceIsForceDeleted {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: 404,
+ }
+ }
+
+ return &postgresflex.GetInstanceResponse{
+ Id: &a.instanceId,
+ Status: postgresflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState),
+ Network: postgresflex.GetInstanceResponseGetNetworkAttributeType(&a.instanceNetwork),
+ }, nil
+}
+
+func (a *apiClientInstanceMocked) ListUsersRequestExecute(
+ _ context.Context,
+ _, _, _ string,
+) (*postgresflex.ListUserResponse, error) {
+ if a.usersGetErrorStatus != 0 {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: a.usersGetErrorStatus,
+ }
+ }
+
+ aux := int64(0)
+ return &postgresflex.ListUserResponse{
+ Pagination: &postgresflex.Pagination{
+ TotalRows: &aux,
+ },
+ Users: &[]postgresflex.ListUser{},
+ }, nil
+}
+
func TestCreateInstanceWaitHandler(t *testing.T) {
tests := []struct {
desc string
instanceGetFails bool
instanceState string
- instanceNetwork v3alpha1api.InstanceNetwork
+ instanceNetwork postgresflex.InstanceNetwork
usersGetErrorStatus int
wantErr bool
- wantRes *v3alpha1api.GetInstanceResponse
+ wantRes *postgresflex.GetInstanceResponse
}{
{
desc: "create_succeeded",
instanceGetFails: false,
instanceState: InstanceStateSuccess,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
RouterAddress: utils.Ptr("10.0.0.1"),
},
wantErr: false,
- wantRes: &v3alpha1api.GetInstanceResponse{
- Id: "foo-bar",
- Status: InstanceStateSuccess,
- Network: v3alpha1api.InstanceNetwork{
+ wantRes: &postgresflex.GetInstanceResponse{
+ Id: utils.Ptr("foo-bar"),
+ Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateSuccess)),
+ Network: &postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -49,7 +101,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
desc: "create_failed",
instanceGetFails: false,
instanceState: InstanceStateFailed,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -62,7 +114,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
desc: "create_failed_2",
instanceGetFails: false,
instanceState: InstanceStateEmpty,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -81,7 +133,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
desc: "users_get_fails",
instanceGetFails: false,
instanceState: InstanceStateSuccess,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -95,7 +147,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
desc: "users_get_fails_2",
instanceGetFails: false,
instanceState: InstanceStateSuccess,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -103,10 +155,10 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
},
usersGetErrorStatus: 400,
wantErr: true,
- wantRes: &v3alpha1api.GetInstanceResponse{
- Id: "foo-bar",
- Status: InstanceStateSuccess,
- Network: v3alpha1api.InstanceNetwork{
+ wantRes: &postgresflex.GetInstanceResponse{
+ Id: utils.Ptr("foo-bar"),
+ Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateSuccess)),
+ Network: &postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -118,8 +170,8 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
desc: "fail when response has no instance address",
instanceGetFails: false,
instanceState: InstanceStateSuccess,
- instanceNetwork: v3alpha1api.InstanceNetwork{
- AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(utils.Ptr("SNA")),
+ instanceNetwork: postgresflex.InstanceNetwork{
+ AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(utils.Ptr("SNA")),
Acl: nil,
InstanceAddress: nil,
RouterAddress: utils.Ptr("10.0.0.1"),
@@ -131,8 +183,8 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
desc: "timeout",
instanceGetFails: false,
instanceState: InstanceStateProgressing,
- instanceNetwork: v3alpha1api.InstanceNetwork{
- AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(utils.Ptr("SNA")),
+ instanceNetwork: postgresflex.InstanceNetwork{
+ AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(utils.Ptr("SNA")),
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
RouterAddress: utils.Ptr("10.0.0.1"),
@@ -144,44 +196,17 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.desc, func(t *testing.T) {
- instanceID := "foo-bar"
+ instanceId := "foo-bar"
- listUsersMock := func(_ v3alpha1api.ApiListUsersRequestRequest) (*v3alpha1api.ListUserResponse, error) {
- if tt.usersGetErrorStatus != 0 {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: tt.usersGetErrorStatus,
- }
- }
-
- aux := int32(0)
- return &v3alpha1api.ListUserResponse{
- Pagination: v3alpha1api.Pagination{
- TotalRows: aux,
- },
- Users: []v3alpha1api.ListUser{},
- }, nil
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceNetwork: tt.instanceNetwork,
+ instanceGetFails: tt.instanceGetFails,
+ usersGetErrorStatus: tt.usersGetErrorStatus,
}
- getInstanceMock := func(_ v3alpha1api.ApiGetInstanceRequestRequest) (*v3alpha1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- return &v3alpha1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3alpha1api.Status(tt.instanceState),
- Network: tt.instanceNetwork,
- }, nil
- }
-
- apiClientMock := v3alpha1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &getInstanceMock,
- ListUsersRequestExecuteMock: &listUsersMock,
- }
-
- handler := CreateInstanceWaitHandler(context.Background(), apiClientMock, "", "", instanceID)
+ handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", "", instanceId)
gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
if (err != nil) != tt.wantErr {
@@ -201,25 +226,25 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
desc string
instanceGetFails bool
instanceState string
- instanceNetwork v3alpha1api.InstanceNetwork
+ instanceNetwork postgresflex.InstanceNetwork
wantErr bool
- wantRes *v3alpha1api.GetInstanceResponse
+ wantRes *postgresflex.GetInstanceResponse
}{
{
desc: "update_succeeded",
instanceGetFails: false,
instanceState: InstanceStateSuccess,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
RouterAddress: utils.Ptr("10.0.0.1"),
},
wantErr: false,
- wantRes: &v3alpha1api.GetInstanceResponse{
- Id: "foo-bar",
- Status: v3alpha1api.Status(InstanceStateSuccess),
- Network: v3alpha1api.InstanceNetwork{
+ wantRes: &postgresflex.GetInstanceResponse{
+ Id: utils.Ptr("foo-bar"),
+ Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateSuccess)),
+ Network: &postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -231,17 +256,17 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
desc: "update_failed",
instanceGetFails: false,
instanceState: InstanceStateFailed,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
RouterAddress: utils.Ptr("10.0.0.1"),
},
wantErr: true,
- wantRes: &v3alpha1api.GetInstanceResponse{
- Id: "foo-bar",
- Status: v3alpha1api.Status(InstanceStateFailed),
- Network: v3alpha1api.InstanceNetwork{
+ wantRes: &postgresflex.GetInstanceResponse{
+ Id: utils.Ptr("foo-bar"),
+ Status: postgresflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(InstanceStateFailed)),
+ Network: &postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -253,7 +278,7 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
desc: "update_failed_2",
instanceGetFails: false,
instanceState: InstanceStateEmpty,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -272,7 +297,7 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
desc: "timeout",
instanceGetFails: false,
instanceState: InstanceStateProgressing,
- instanceNetwork: v3alpha1api.InstanceNetwork{
+ instanceNetwork: postgresflex.InstanceNetwork{
AccessScope: nil,
Acl: nil,
InstanceAddress: utils.Ptr("10.0.0.1"),
@@ -285,38 +310,16 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
for _, tt := range tests {
t.Run(
tt.desc, func(t *testing.T) {
- instanceID := "foo-bar"
+ instanceId := "foo-bar"
- listUsersMock := func(_ v3alpha1api.ApiListUsersRequestRequest) (*v3alpha1api.ListUserResponse, error) {
- aux := int32(0)
- return &v3alpha1api.ListUserResponse{
- Pagination: v3alpha1api.Pagination{
- TotalRows: aux,
- },
- Users: []v3alpha1api.ListUser{},
- }, nil
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceNetwork: tt.instanceNetwork,
+ instanceGetFails: tt.instanceGetFails,
}
- getInstanceMock := func(_ v3alpha1api.ApiGetInstanceRequestRequest) (*v3alpha1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- return &v3alpha1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3alpha1api.Status(tt.instanceState),
- Network: tt.instanceNetwork,
- }, nil
- }
-
- apiClientMock := v3alpha1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &getInstanceMock,
- ListUsersRequestExecuteMock: &listUsersMock,
- }
-
- handler := PartialUpdateInstanceWaitHandler(context.Background(), apiClientMock, "", "", instanceID)
+ handler := PartialUpdateInstanceWaitHandler(context.Background(), apiClient, "", "", instanceId)
gotRes, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
if (err != nil) != tt.wantErr {
diff --git a/stackit/internal/wait/sqlserverflexalpha/wait.go b/stackit/internal/wait/sqlserverflexalpha/wait.go
index e9aefa2c..7484cbe9 100644
--- a/stackit/internal/wait/sqlserverflexalpha/wait.go
+++ b/stackit/internal/wait/sqlserverflexalpha/wait.go
@@ -1,3 +1,5 @@
+// Copyright (c) STACKIT
+
package sqlserverflexalpha
import (
@@ -11,8 +13,7 @@ import (
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/wait"
-
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
)
// READY, PENDING, PROGRESSING, FAILURE, UNKNOWN,
@@ -26,362 +27,97 @@ const (
InstanceStateTerminating = "TERMINATING"
)
-// APIClientInterface Interface needed for tests
-type APIClientInterface interface {
- GetInstanceRequest(
- ctx context.Context,
- projectId, region, instanceId string,
- ) v3alpha1api.ApiGetInstanceRequestRequest
-
- GetDatabaseRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- databaseName string,
- ) v3alpha1api.ApiGetDatabaseRequestRequest
-
- GetUserRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) v3alpha1api.ApiGetUserRequestRequest
-
- ListRolesRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) v3alpha1api.ApiListRolesRequestRequest
-
- ListUsersRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) v3alpha1api.ApiListUsersRequestRequest
-}
-
-// APIClientUserInterface Interface needed for tests
-type APIClientUserInterface interface {
- DeleteUserRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) error
+// APIClientInstanceInterface Interface needed for tests
+type APIClientInstanceInterface interface {
+ GetInstanceRequestExecute(ctx context.Context, projectId, region, instanceId string) (*sqlserverflex.GetInstanceResponse, error)
}
// CreateInstanceWaitHandler will wait for instance creation
-func CreateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- return false, nil, err
- }
- if s == nil || s.Id != instanceId {
+func CreateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
+ handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ if err != nil {
+ return false, nil, err
+ }
+ if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
+ return false, nil, nil
+ }
+ switch strings.ToLower(string(*s.Status)) {
+ case strings.ToLower(InstanceStateSuccess):
+ if s.Network.InstanceAddress == nil {
+ tflog.Info(ctx, "Waiting for instance_address")
return false, nil, nil
}
- switch strings.ToLower(string(s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- if s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
- if s.Network.InstanceAddress == nil {
- tflog.Info(ctx, "Waiting for instance_address")
- return false, nil, nil
- }
- if s.Network.RouterAddress == nil {
- tflog.Info(ctx, "Waiting for router_address")
- return false, nil, nil
- }
- }
-
- tflog.Info(ctx, "trying to get roles")
- time.Sleep(10 * time.Second)
- _, rolesErr := a.ListRolesRequest(ctx, projectId, region, instanceId).Execute()
- if rolesErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(rolesErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, rolesErr
- }
- tflog.Info(
- ctx, "wait for get-roles to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
-
- tflog.Info(ctx, "trying to get users")
- time.Sleep(10 * time.Second)
- _, usersErr := a.ListUsersRequest(ctx, projectId, region, instanceId).Execute()
- if usersErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(usersErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, usersErr
- }
- tflog.Info(
- ctx, "wait for get-users to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
- return true, nil, fmt.Errorf("create failed for instance with id %s", instanceId)
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": s.Status,
- },
- )
- time.Sleep(10 * time.Second)
+ if s.Network.RouterAddress == nil {
+ tflog.Info(ctx, "Waiting for router_address")
return false, nil, nil
- default:
- tflog.Info(
- ctx, "Wait (create) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return true, nil, errors.New("unknown status received")
}
- },
- )
+ return true, s, nil
+ case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
+ return true, s, fmt.Errorf("create failed for instance with id %s", instanceId)
+ default:
+ tflog.Info(ctx, "Wait (create) received unknown status", map[string]interface{}{
+ "instanceId": instanceId,
+ "status": s.Status,
+ })
+ return false, s, nil
+ }
+ })
+ handler.SetTimeout(45 * time.Minute)
+ handler.SetSleepBeforeWait(15 * time.Second)
return handler
}
// UpdateInstanceWaitHandler will wait for instance update
-func UpdateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- return false, nil, err
- }
- if s == nil || s.Id != instanceId {
- return false, nil, nil
- }
- switch strings.ToLower(string(s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
- return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": s.Status,
- },
- )
- return false, s, nil
- default:
- tflog.Info(
- ctx, "Wait (update) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return false, s, nil
- }
- },
- )
+func UpdateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
+ handler := wait.New(func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
+ s, err := a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ if err != nil {
+ return false, nil, err
+ }
+ if s == nil || s.Id == nil || *s.Id != instanceId || s.Status == nil {
+ return false, nil, nil
+ }
+ switch strings.ToLower(string(*s.Status)) {
+ case strings.ToLower(InstanceStateSuccess):
+ return true, s, nil
+ case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
+ return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
+ default:
+ tflog.Info(ctx, "Wait (update) received unknown status", map[string]interface{}{
+ "instanceId": instanceId,
+ "status": s.Status,
+ })
+ return false, s, nil
+ }
+ })
+ handler.SetSleepBeforeWait(15 * time.Second)
+ handler.SetTimeout(45 * time.Minute)
return handler
}
+// PartialUpdateInstanceWaitHandler will wait for instance update
+func PartialUpdateInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
+ return UpdateInstanceWaitHandler(ctx, a, projectId, instanceId, region)
+}
+
// DeleteInstanceWaitHandler will wait for instance deletion
-func DeleteInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[v3alpha1api.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err == nil {
- return false, s, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return true, nil, nil
- },
- )
- handler.SetTimeout(30 * time.Minute)
- return handler
-}
-
-// CreateDatabaseWaitHandler will wait for instance creation
-func CreateDatabaseWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, databaseName string,
-) *wait.AsyncActionHandler[v3alpha1api.GetDatabaseResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetDatabaseResponse, err error) {
- s, err := a.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "get database - could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- if s == nil || s.Name != databaseName {
- return false, nil, errors.New("response did return different result")
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// CreateUserWaitHandler will wait for instance creation
-func CreateUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
- userId int64,
-) *wait.AsyncActionHandler[v3alpha1api.GetUserResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.GetUserResponse, err error) {
- s, err := a.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// WaitForUserWaitHandler will wait for instance creation
-func WaitForUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, userName string,
-) *wait.AsyncActionHandler[v3alpha1api.ListUserResponse] {
- startTime := time.Now()
- timeOut := 2 * time.Minute
-
- handler := wait.New(
- func() (waitFinished bool, response *v3alpha1api.ListUserResponse, err error) {
- if time.Since(startTime) > timeOut {
- return false, nil, errors.New("ran into timeout")
- }
- s, err := a.ListUsersRequest(ctx, projectId, region, instanceId).Size(100).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- tflog.Info(
- ctx, "Wait (list users) still waiting", map[string]interface{}{},
- )
-
- return false, nil, nil
- }
- users, ok := s.GetUsersOk()
- if !ok {
- return false, nil, errors.New("no users found")
- }
-
- for _, u := range users {
- if u.GetUsername() == userName {
- return true, s, nil
- }
- }
- tflog.Info(
- ctx, "Wait (list users) user still not present", map[string]interface{}{},
- )
+func DeleteInstanceWaitHandler(ctx context.Context, a APIClientInstanceInterface, projectId, instanceId, region string) *wait.AsyncActionHandler[struct{}] {
+ handler := wait.New(func() (waitFinished bool, response *struct{}, err error) {
+ _, err = a.GetInstanceRequestExecute(ctx, projectId, region, instanceId)
+ if err == nil {
return false, nil, nil
- },
- )
- return handler
-}
-
-// DeleteUserWaitHandler will wait for instance deletion
-func DeleteUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, region, instanceId string,
- userId int64,
-) *wait.AsyncActionHandler[struct{}] {
- handler := wait.New(
- func() (waitFinished bool, response *struct{}, err error) {
- _, err = a.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err == nil {
- return false, nil, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- return true, nil, nil
- default:
- return false, nil, err
- }
- },
- )
+ }
+ var oapiErr *oapierror.GenericOpenAPIError
+ ok := errors.As(err, &oapiErr)
+ if !ok {
+ return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
+ }
+ if oapiErr.StatusCode != http.StatusNotFound {
+ return false, nil, err
+ }
+ return true, nil, nil
+ })
handler.SetTimeout(15 * time.Minute)
- handler.SetSleepBeforeWait(15 * time.Second)
return handler
}
diff --git a/stackit/internal/wait/sqlserverflexalpha/wait_test.go b/stackit/internal/wait/sqlserverflexalpha/wait_test.go
index ed44bd22..7c0e52a9 100644
--- a/stackit/internal/wait/sqlserverflexalpha/wait_test.go
+++ b/stackit/internal/wait/sqlserverflexalpha/wait_test.go
@@ -1,66 +1,91 @@
+// Copyright (c) STACKIT
+
package sqlserverflexalpha
import (
"context"
- "reflect"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"github.com/stackitcloud/stackit-sdk-go/core/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
+ sqlserverflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
)
// Used for testing instance operations
+type apiClientInstanceMocked struct {
+ instanceId string
+ instanceState string
+ instanceNetwork sqlserverflex.InstanceNetwork
+ instanceIsDeleted bool
+ instanceGetFails bool
+}
+
+func (a *apiClientInstanceMocked) GetInstanceRequestExecute(_ context.Context, _, _, _ string) (*sqlserverflex.GetInstanceResponse, error) {
+ if a.instanceGetFails {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: 500,
+ }
+ }
+
+ if a.instanceIsDeleted {
+ return nil, &oapierror.GenericOpenAPIError{
+ StatusCode: 404,
+ }
+ }
+
+ return &sqlserverflex.GetInstanceResponse{
+ Id: &a.instanceId,
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(&a.instanceState),
+ Network: &a.instanceNetwork,
+ }, nil
+}
func TestCreateInstanceWaitHandler(t *testing.T) {
- instanceID := utils.Ptr("foo")
+ t.Skip("skipping - needs refactoring")
tests := []struct {
desc string
- instanceID string
instanceGetFails bool
instanceState string
- instanceNetwork v3alpha1api.InstanceNetwork
+ instanceNetwork sqlserverflex.InstanceNetwork
usersGetErrorStatus int
wantErr bool
- wantRes *v3alpha1api.GetInstanceResponse
+ wantRes *sqlserverflex.GetInstanceResponse
}{
- //{
- // desc: "create_succeeded",
- // instanceId: *instanceId,
- // instanceGetFails: false,
- // instanceState: *stateSuccess,
- // instanceNetwork: v3alpha1api.InstanceNetwork{
- // AccessScope: nil,
- // Acl: nil,
- // InstanceAddress: utils.Ptr("10.0.0.1"),
- // RouterAddress: utils.Ptr("10.0.0.2"),
- // },
- // wantErr: false,
- // wantRes: &v3alpha1api.GetInstanceResponse{
- // BackupSchedule: nil,
- // Edition: nil,
- // Encryption: nil,
- // FlavorId: nil,
- // Id: instanceId,
- // IsDeletable: nil,
- // Name: nil,
- // Network: &v3alpha1api.InstanceNetwork{
- // AccessScope: nil,
- // Acl: nil,
- // InstanceAddress: utils.Ptr("10.0.0.1"),
- // RouterAddress: utils.Ptr("10.0.0.2"),
- // },
- // Replicas: nil,
- // RetentionDays: nil,
- // Status: v3alpha1api.GetInstanceResponseGetStatusAttributeType(stateSuccess),
- // Storage: nil,
- // Version: nil,
- // },
- // },
+ {
+ desc: "create_succeeded",
+ instanceGetFails: false,
+ instanceState: InstanceStateSuccess,
+ instanceNetwork: sqlserverflex.InstanceNetwork{
+ AccessScope: nil,
+ Acl: nil,
+ InstanceAddress: utils.Ptr("10.0.0.1"),
+ RouterAddress: utils.Ptr("10.0.0.2"),
+ },
+ wantErr: false,
+ wantRes: &sqlserverflex.GetInstanceResponse{
+ BackupSchedule: nil,
+ Edition: nil,
+ Encryption: nil,
+ FlavorId: nil,
+ Id: nil,
+ IsDeletable: nil,
+ Name: nil,
+ Network: &sqlserverflex.InstanceNetwork{
+ AccessScope: nil,
+ Acl: nil,
+ InstanceAddress: utils.Ptr("10.0.0.1"),
+ RouterAddress: utils.Ptr("10.0.0.2"),
+ },
+ Replicas: nil,
+ RetentionDays: nil,
+ Status: nil,
+ Storage: nil,
+ Version: nil,
+ },
+ },
{
desc: "create_failed",
- instanceID: *instanceID,
instanceGetFails: false,
instanceState: InstanceStateFailed,
wantErr: true,
@@ -68,7 +93,6 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
},
{
desc: "create_failed_2",
- instanceID: *instanceID,
instanceGetFails: false,
instanceState: InstanceStateEmpty,
wantErr: true,
@@ -76,14 +100,12 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
},
{
desc: "instance_get_fails",
- instanceID: *instanceID,
instanceGetFails: true,
wantErr: true,
wantRes: nil,
},
{
desc: "timeout",
- instanceID: *instanceID,
instanceGetFails: false,
instanceState: InstanceStateProcessing,
wantErr: true,
@@ -91,42 +113,31 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
},
}
for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- mockCall := func(_ v3alpha1api.ApiGetInstanceRequestRequest) (*v3alpha1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
+ t.Run(tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
- return &v3alpha1api.GetInstanceResponse{
- Id: tt.instanceID,
- Status: v3alpha1api.Status(tt.instanceState),
- Network: tt.instanceNetwork,
- }, nil
- }
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceGetFails: tt.instanceGetFails,
+ }
- apiClient := v3alpha1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &mockCall,
- }
+ handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
- handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", tt.instanceID, "")
+ gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
-
- if !reflect.DeepEqual(gotRes, tt.wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
- }
- },
- )
+ if !cmp.Equal(gotRes, tt.wantRes) {
+ t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
+ }
+ })
}
}
func TestUpdateInstanceWaitHandler(t *testing.T) {
+ t.Skip("skipping - needs refactoring")
tests := []struct {
desc string
instanceGetFails bool
@@ -170,48 +181,34 @@ func TestUpdateInstanceWaitHandler(t *testing.T) {
},
}
for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceID := "foo-bar"
+ t.Run(tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
- mockCall := func(_ v3alpha1api.ApiGetInstanceRequestRequest) (*v3alpha1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
+ apiClient := &apiClientInstanceMocked{
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ instanceGetFails: tt.instanceGetFails,
+ }
- return &v3alpha1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3alpha1api.Status(tt.instanceState),
- //Network: tt.instanceNetwork,
- }, nil
+ var wantRes *sqlserverflex.GetInstanceResponse
+ if tt.wantResp {
+ wantRes = &sqlserverflex.GetInstanceResponse{
+ Id: &instanceId,
+ Status: sqlserverflex.GetInstanceResponseGetStatusAttributeType(utils.Ptr(tt.instanceState)),
}
+ }
- apiClient := v3alpha1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &mockCall,
- }
+ handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
- var wantRes *v3alpha1api.GetInstanceResponse
- if tt.wantResp {
- wantRes = &v3alpha1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3alpha1api.Status(tt.instanceState),
- }
- }
+ gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
- handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceID, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- if !cmp.Equal(gotRes, wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
- }
- },
- )
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+ if !cmp.Equal(gotRes, wantRes) {
+ t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
+ }
+ })
}
}
@@ -241,42 +238,23 @@ func TestDeleteInstanceWaitHandler(t *testing.T) {
},
}
for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceID := "foo-bar"
+ t.Run(tt.desc, func(t *testing.T) {
+ instanceId := "foo-bar"
- mockCall := func(_ v3alpha1api.ApiGetInstanceRequestRequest) (*v3alpha1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
+ apiClient := &apiClientInstanceMocked{
+ instanceGetFails: tt.instanceGetFails,
+ instanceIsDeleted: tt.instanceState == InstanceStateSuccess,
+ instanceId: instanceId,
+ instanceState: tt.instanceState,
+ }
- if tt.instanceState == InstanceStateSuccess {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 404,
- }
- }
+ handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceId, "")
- return &v3alpha1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3alpha1api.Status(tt.instanceState),
- //Network: tt.instanceNetwork,
- }, nil
- }
+ _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
- apiClient := v3alpha1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &mockCall,
- }
-
- handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceID, "")
-
- _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
+ if (err != nil) != tt.wantErr {
+ t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
}
}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait.go b/stackit/internal/wait/sqlserverflexbeta/wait.go
deleted file mode 100644
index a13def0f..00000000
--- a/stackit/internal/wait/sqlserverflexbeta/wait.go
+++ /dev/null
@@ -1,405 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "errors"
- "fmt"
- "net/http"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- "github.com/stackitcloud/stackit-sdk-go/core/wait"
-
- sqlserverflex "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-)
-
-// READY, PENDING, PROGRESSING, FAILURE, UNKNOWN,
-const (
- InstanceStateEmpty = ""
- InstanceStateSuccess = "READY"
- InstanceStatePending = "PENDING"
- InstanceStateProcessing = "PROGRESSING"
- InstanceStateFailed = "FAILURE"
- InstanceStateUnknown = "UNKNOWN"
- InstanceStateTerminating = "TERMINATING"
-)
-
-// APIClientInterface Interface needed for tests
-type APIClientInterface interface {
- GetInstanceRequest(
- ctx context.Context,
- projectId, region, instanceId string,
- ) sqlserverflex.ApiGetInstanceRequestRequest
- GetDatabaseRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- databaseName string,
- ) sqlserverflex.ApiGetDatabaseRequestRequest
- GetUserRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) sqlserverflex.ApiGetUserRequestRequest
-
- ListRolesRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) sqlserverflex.ApiListRolesRequestRequest
-
- ListUsersRequest(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- ) sqlserverflex.ApiListUsersRequestRequest
-}
-
-// APIClientUserInterface Interface needed for tests
-type APIClientUserInterface interface {
- DeleteUserRequestExecute(
- ctx context.Context,
- projectId string,
- region string,
- instanceId string,
- userId int64,
- ) error
-}
-
-// CreateInstanceWaitHandler will wait for instance creation
-func CreateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError: %w", err)
- }
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- return false, nil, nil
- default:
- return false, nil, fmt.Errorf("api error: %w", err)
- }
- }
- if s == nil || s.Id != instanceId {
- return false, nil, nil
- }
- switch strings.ToLower(string(s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- if s.Network.AccessScope != nil && *s.Network.AccessScope == "SNA" {
- if s.Network.InstanceAddress == nil {
- tflog.Info(ctx, "Waiting for instance_address")
- return false, nil, nil
- }
- if s.Network.RouterAddress == nil {
- tflog.Info(ctx, "Waiting for router_address")
- return false, nil, nil
- }
- }
-
- tflog.Info(ctx, "trying to get roles")
- time.Sleep(10 * time.Second)
- _, rolesErr := a.ListRolesRequest(ctx, projectId, region, instanceId).Execute()
- if rolesErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(rolesErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, rolesErr
- }
- tflog.Info(
- ctx, "wait for get-roles to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
-
- tflog.Info(ctx, "trying to get users")
- time.Sleep(10 * time.Second)
- _, usersErr := a.ListUsersRequest(ctx, projectId, region, instanceId).Execute()
- if usersErr != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(usersErr, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusInternalServerError {
- tflog.Info(
- ctx, "got error from api", map[string]interface{}{
- "error": rolesErr.Error(),
- },
- )
- return false, nil, usersErr
- }
- tflog.Info(
- ctx, "wait for get-users to work hack", map[string]interface{}{},
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- }
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown):
- return true, nil, fmt.Errorf(
- "create failed for instance %s with status %s",
- instanceId,
- InstanceStateUnknown,
- )
- case strings.ToLower(InstanceStateFailed):
- return true, nil, fmt.Errorf(
- "create failed for instance %s with status %s",
- instanceId,
- InstanceStateFailed,
- )
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": s.Status,
- },
- )
- time.Sleep(10 * time.Second)
- return false, nil, nil
- default:
- tflog.Info(
- ctx, "Wait (create) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return true, nil, errors.New("unknown status received")
- }
- },
- )
- return handler
-}
-
-// UpdateInstanceWaitHandler will wait for instance update
-func UpdateInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err != nil {
- return false, nil, err
- }
- if s == nil || s.Id != instanceId {
- return false, nil, nil
- }
- switch strings.ToLower(string(s.Status)) {
- case strings.ToLower(InstanceStateSuccess):
- return true, s, nil
- case strings.ToLower(InstanceStateUnknown), strings.ToLower(InstanceStateFailed):
- return true, s, fmt.Errorf("update failed for instance with id %s", instanceId)
- case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
- tflog.Info(
- ctx, "request is being handled", map[string]interface{}{
- "status": s.Status,
- },
- )
- return false, s, nil
- default:
- tflog.Info(
- ctx, "Wait (update) received unknown status", map[string]interface{}{
- "instanceId": instanceId,
- "status": s.Status,
- },
- )
- return false, s, nil
- }
- },
- )
- return handler
-}
-
-// DeleteInstanceWaitHandler will wait for instance deletion
-func DeleteInstanceWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
-) *wait.AsyncActionHandler[sqlserverflex.GetInstanceResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetInstanceResponse, err error) {
- s, err := a.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
- if err == nil {
- return false, s, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return true, nil, nil
- },
- )
- handler.SetTimeout(30 * time.Minute)
- return handler
-}
-
-// CreateDatabaseWaitHandler will wait for instance creation
-func CreateDatabaseWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, databaseName string,
-) *wait.AsyncActionHandler[sqlserverflex.GetDatabaseResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetDatabaseResponse, err error) {
- s, err := a.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "get database - could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- if s == nil || s.Name != databaseName {
- return false, nil, errors.New("response did return different result")
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// CreateUserWaitHandler will wait for instance creation
-func CreateUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region string,
- userId int64,
-) *wait.AsyncActionHandler[sqlserverflex.GetUserResponse] {
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.GetUserResponse, err error) {
- s, err := a.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- return false, nil, nil
- }
- return true, s, nil
- },
- )
- return handler
-}
-
-// WaitForUserWaitHandler will wait for instance creation
-func WaitForUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, instanceId, region, userName string,
-) *wait.AsyncActionHandler[sqlserverflex.ListUserResponse] {
- startTime := time.Now()
- timeOut := 2 * time.Minute
-
- handler := wait.New(
- func() (waitFinished bool, response *sqlserverflex.ListUserResponse, err error) {
- if time.Since(startTime) > timeOut {
- return false, nil, errors.New("ran into timeout")
- }
- s, err := a.ListUsersRequest(ctx, projectId, region, instanceId).Size(100).Execute()
- if err != nil {
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf(
- "wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
- err.Error(),
- )
- }
- if oapiErr.StatusCode != http.StatusNotFound {
- return false, nil, err
- }
- tflog.Info(
- ctx, "Wait (list users) still waiting", map[string]interface{}{},
- )
-
- return false, nil, nil
- }
- users, ok := s.GetUsersOk()
- if !ok {
- return false, nil, errors.New("no users found")
- }
-
- for _, u := range users {
- if u.GetUsername() == userName {
- return true, s, nil
- }
- }
- tflog.Info(
- ctx, "Wait (list users) user still not present", map[string]interface{}{},
- )
- return false, nil, nil
- },
- )
- return handler
-}
-
-// DeleteUserWaitHandler will wait for instance deletion
-func DeleteUserWaitHandler(
- ctx context.Context,
- a APIClientInterface,
- projectId, region, instanceId string,
- userId int64,
-) *wait.AsyncActionHandler[struct{}] {
- handler := wait.New(
- func() (waitFinished bool, response *struct{}, err error) {
- _, err = a.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
- if err == nil {
- return false, nil, nil
- }
- var oapiErr *oapierror.GenericOpenAPIError
- ok := errors.As(err, &oapiErr)
- if !ok {
- return false, nil, fmt.Errorf("could not convert error to oapierror.GenericOpenAPIError")
- }
-
- switch oapiErr.StatusCode {
- case http.StatusNotFound:
- return true, nil, nil
- default:
- return false, nil, err
- }
- },
- )
- handler.SetTimeout(15 * time.Minute)
- handler.SetSleepBeforeWait(15 * time.Second)
- return handler
-}
diff --git a/stackit/internal/wait/sqlserverflexbeta/wait_test.go b/stackit/internal/wait/sqlserverflexbeta/wait_test.go
deleted file mode 100644
index 44a389f8..00000000
--- a/stackit/internal/wait/sqlserverflexbeta/wait_test.go
+++ /dev/null
@@ -1,305 +0,0 @@
-package sqlserverflexbeta
-
-import (
- "context"
- "reflect"
- "testing"
- "time"
-
- "github.com/google/go-cmp/cmp"
- "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
- "github.com/stackitcloud/stackit-sdk-go/core/utils"
- "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3beta1api"
-)
-
-func TestCreateInstanceWaitHandler(t *testing.T) {
- instanceID := utils.Ptr("foo")
- tests := []struct {
- desc string
- instanceID string
- instanceGetFails bool
- instanceState string
- instanceNetwork v3beta1api.InstanceNetwork
- usersGetErrorStatus int
- wantErr bool
- wantRes *v3beta1api.GetInstanceResponse
- }{
- {
- desc: "create_succeeded_default_values",
- instanceID: "instance1",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- instanceNetwork: v3beta1api.InstanceNetwork{
- AccessScope: (*v3beta1api.InstanceNetworkAccessScope)(utils.Ptr("PUBLIC")),
- Acl: nil,
- InstanceAddress: utils.Ptr("10.0.0.1"),
- RouterAddress: utils.Ptr("10.0.0.2"),
- },
- wantErr: false,
- wantRes: &v3beta1api.GetInstanceResponse{
- BackupSchedule: "",
- Edition: "",
- Encryption: nil,
- FlavorId: "",
- Id: "instance1",
- IsDeletable: false,
- Name: "",
- Network: v3beta1api.InstanceNetwork{
- AccessScope: (*v3beta1api.InstanceNetworkAccessScope)(utils.Ptr("PUBLIC")),
- Acl: nil,
- InstanceAddress: utils.Ptr("10.0.0.1"),
- RouterAddress: utils.Ptr("10.0.0.2"),
- },
- Replicas: 0,
- RetentionDays: 0,
- Status: v3beta1api.Status(InstanceStateSuccess),
- Storage: v3beta1api.Storage{},
- Version: "",
- },
- },
- {
- desc: "create_failed",
- instanceID: *instanceID,
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "create_failed_2",
- instanceID: *instanceID,
- instanceGetFails: false,
- instanceState: InstanceStateEmpty,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "instance_get_fails",
- instanceID: *instanceID,
- instanceGetFails: true,
- wantErr: true,
- wantRes: nil,
- },
- {
- desc: "timeout",
- instanceID: *instanceID,
- instanceGetFails: false,
- instanceState: InstanceStateProcessing,
- wantErr: true,
- wantRes: nil,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- listRolesMock := func(_ v3beta1api.ApiListRolesRequestRequest) (*v3beta1api.ListRolesResponse, error) {
- return &v3beta1api.ListRolesResponse{
- Roles: []string{},
- }, nil
- }
-
- listUsersMock := func(_ v3beta1api.ApiListUsersRequestRequest) (*v3beta1api.ListUserResponse, error) {
- aux := int64(0)
- return &v3beta1api.ListUserResponse{
- Pagination: v3beta1api.Pagination{
- TotalRows: aux,
- },
- Users: []v3beta1api.ListUser{},
- }, nil
- }
-
- mockCall := func(_ v3beta1api.ApiGetInstanceRequestRequest) (*v3beta1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- return &v3beta1api.GetInstanceResponse{
- Id: tt.instanceID,
- Status: v3beta1api.Status(tt.instanceState),
- Network: tt.instanceNetwork,
- Storage: v3beta1api.Storage{},
- }, nil
- }
-
- apiClient := v3beta1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &mockCall,
- ListUsersRequestExecuteMock: &listUsersMock,
- ListRolesRequestExecuteMock: &listRolesMock,
- }
-
- handler := CreateInstanceWaitHandler(context.Background(), apiClient, "", tt.instanceID, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
-
- if err == nil {
- if diff := cmp.Diff(tt.wantRes, gotRes); diff != "" {
- t.Errorf("model mismatch (-want +got):\n%s", diff)
- }
- }
- if !reflect.DeepEqual(gotRes, tt.wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, tt.wantRes)
- }
- },
- )
- }
-}
-
-func TestUpdateInstanceWaitHandler(t *testing.T) {
- tests := []struct {
- desc string
- instanceGetFails bool
- instanceState string
- wantErr bool
- wantResp bool
- }{
- {
- desc: "update_succeeded",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- wantErr: false,
- wantResp: true,
- },
- {
- desc: "update_failed",
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- wantResp: true,
- },
- {
- desc: "update_failed_2",
- instanceGetFails: false,
- instanceState: InstanceStateEmpty,
- wantErr: true,
- wantResp: true,
- },
- {
- desc: "get_fails",
- instanceGetFails: true,
- wantErr: true,
- wantResp: false,
- },
- {
- desc: "timeout",
- instanceGetFails: false,
- instanceState: InstanceStateProcessing,
- wantErr: true,
- wantResp: true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceID := "foo-bar"
-
- mockCall := func(_ v3beta1api.ApiGetInstanceRequestRequest) (*v3beta1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- return &v3beta1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3beta1api.Status(tt.instanceState),
- //Network: tt.instanceNetwork,
- }, nil
- }
-
- apiClient := v3beta1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &mockCall,
- }
-
- var wantRes *v3beta1api.GetInstanceResponse
- if tt.wantResp {
- wantRes = &v3beta1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3beta1api.Status(tt.instanceState),
- }
- }
-
- handler := UpdateInstanceWaitHandler(context.Background(), apiClient, "", instanceID, "")
-
- gotRes, err := handler.SetTimeout(10 * time.Millisecond).SetSleepBeforeWait(1 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- if !cmp.Equal(gotRes, wantRes) {
- t.Fatalf("handler gotRes = %v, want %v", gotRes, wantRes)
- }
- },
- )
- }
-}
-
-func TestDeleteInstanceWaitHandler(t *testing.T) {
- tests := []struct {
- desc string
- instanceGetFails bool
- instanceState string
- wantErr bool
- }{
- {
- desc: "delete_succeeded",
- instanceGetFails: false,
- instanceState: InstanceStateSuccess,
- wantErr: false,
- },
- {
- desc: "delete_failed",
- instanceGetFails: false,
- instanceState: InstanceStateFailed,
- wantErr: true,
- },
- {
- desc: "get_fails",
- instanceGetFails: true,
- wantErr: true,
- },
- }
- for _, tt := range tests {
- t.Run(
- tt.desc, func(t *testing.T) {
- instanceID := "foo-bar"
-
- mockCall := func(_ v3beta1api.ApiGetInstanceRequestRequest) (*v3beta1api.GetInstanceResponse, error) {
- if tt.instanceGetFails {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 500,
- }
- }
-
- if tt.instanceState == InstanceStateSuccess {
- return nil, &oapierror.GenericOpenAPIError{
- StatusCode: 404,
- }
- }
-
- return &v3beta1api.GetInstanceResponse{
- Id: instanceID,
- Status: v3beta1api.Status(tt.instanceState),
- //Network: tt.instanceNetwork,
- }, nil
- }
-
- apiClient := v3beta1api.DefaultAPIServiceMock{
- GetInstanceRequestExecuteMock: &mockCall,
- }
-
- handler := DeleteInstanceWaitHandler(context.Background(), apiClient, "", instanceID, "")
-
- _, err := handler.SetTimeout(10 * time.Millisecond).WaitWithContext(context.Background())
-
- if (err != nil) != tt.wantErr {
- t.Fatalf("handler error = %v, wantErr %v", err, tt.wantErr)
- }
- },
- )
- }
-}
diff --git a/stackit/provider.go b/stackit/provider.go
index 62990050..22ade416 100644
--- a/stackit/provider.go
+++ b/stackit/provider.go
@@ -19,24 +19,18 @@ import (
"github.com/hashicorp/terraform-plugin-log/tflog"
sdkauth "github.com/stackitcloud/stackit-sdk-go/core/auth"
"github.com/stackitcloud/stackit-sdk-go/core/config"
-
- sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
- sqlserverflexalphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
- sqlserverflexalphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
- sqlserverflexbetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user"
-
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/features"
-
postgresFlexAlphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database"
postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor"
postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors"
postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user"
-
- sqlserverFlexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
- sqlserverflexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
- // sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbetaUser/user"
+ sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
+ sqlserverFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/flavor"
+ sqlServerFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
+ sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
+ sqlserverflexalphaVersion "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/version"
)
// Ensure the implementation satisfies the expected interfaces
@@ -44,22 +38,6 @@ var (
_ provider.Provider = &Provider{}
)
-const providerConfigError = "Error configuring provider"
-
-//nolint:unused // These constants are defined for future use in retry logic for HTTP requests, which is not yet implemented.
-/*
-const (
- // maxRetries is the maximum number of retries for a failed HTTP request.
- maxRetries = 3
- // initialDelay is the initial delay before the first retry attempt.
- initialDelay = 2 * time.Second
- // maxDelay is the maximum delay between retry attempts.
- maxDelay = 90 * time.Second
- // perTryTimeout is the timeout for each individual HTTP request attempt.
- perTryTimeout = 30 * time.Second
-)
-*/
-
// Provider is the provider implementation.
type Provider struct {
version string
@@ -94,7 +72,7 @@ type providerModel struct {
// Custom endpoints
AuthorizationCustomEndpoint types.String `tfsdk:"authorization_custom_endpoint"`
CdnCustomEndpoint types.String `tfsdk:"cdn_custom_endpoint"`
- DNSCustomEndpoint types.String `tfsdk:"dns_custom_endpoint"`
+ DnsCustomEndpoint types.String `tfsdk:"dns_custom_endpoint"`
GitCustomEndpoint types.String `tfsdk:"git_custom_endpoint"`
IaaSCustomEndpoint types.String `tfsdk:"iaas_custom_endpoint"`
KmsCustomEndpoint types.String `tfsdk:"kms_custom_endpoint"`
@@ -126,7 +104,6 @@ type providerModel struct {
// Schema defines the provider-level schema for configuration data.
func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {
- //nolint:gosec // These are just descriptions, not actual credentials or sensitive information.
descriptions := map[string]string{
"credentials_path": "Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.",
"service_account_token": "Token used for authentication. If set, the token flow will be used to authenticate all operations.",
@@ -372,7 +349,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest,
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- providerConfigError,
+ "Error configuring provider",
fmt.Sprintf("Setting up bool value: %v", diags.Errors()),
)
}
@@ -391,7 +368,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest,
setStringField(providerConfig.DefaultRegion, func(v string) { providerData.DefaultRegion = v })
setStringField(
- providerConfig.Region, // nolint:staticcheck // preliminary handling of deprecated attribute
+ providerConfig.Region,
func(v string) { providerData.Region = v }, // nolint:staticcheck // preliminary handling of deprecated attribute
)
setBoolField(providerConfig.EnableBetaResources, func(v bool) { providerData.EnableBetaResources = v })
@@ -401,7 +378,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest,
func(v string) { providerData.AuthorizationCustomEndpoint = v },
)
setStringField(providerConfig.CdnCustomEndpoint, func(v string) { providerData.CdnCustomEndpoint = v })
- setStringField(providerConfig.DNSCustomEndpoint, func(v string) { providerData.DnsCustomEndpoint = v })
+ setStringField(providerConfig.DnsCustomEndpoint, func(v string) { providerData.DnsCustomEndpoint = v })
setStringField(providerConfig.GitCustomEndpoint, func(v string) { providerData.GitCustomEndpoint = v })
setStringField(providerConfig.IaaSCustomEndpoint, func(v string) { providerData.IaaSCustomEndpoint = v })
setStringField(providerConfig.KmsCustomEndpoint, func(v string) { providerData.KMSCustomEndpoint = v })
@@ -475,37 +452,27 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest,
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- providerConfigError,
+ "Error configuring provider",
fmt.Sprintf("Setting up experiments: %v", diags.Errors()),
)
}
providerData.Experiments = experimentValues
}
- baseRoundTripper, err := sdkauth.SetupAuth(sdkConfig)
+ roundTripper, err := sdkauth.SetupAuth(sdkConfig)
if err != nil {
core.LogAndAddError(
ctx,
&resp.Diagnostics,
- providerConfigError,
+ "Error configuring provider",
fmt.Sprintf("Setting up authentication: %v", err),
)
return
}
- //nolint:gocritic // maybe later in the code
- // roundTripper := core.NewRetryRoundTripper(
- // baseRoundTripper,
- // maxRetries,
- // initialDelay,
- // maxDelay,
- // perTryTimeout,
- //)
-
// Make round tripper and custom endpoints available during DataSource and Resource
// type Configure methods.
- // providerData.RoundTripper = roundTripper
- providerData.RoundTripper = baseRoundTripper
+ providerData.RoundTripper = roundTripper
resp.DataSourceData = providerData
resp.ResourceData = providerData
@@ -535,32 +502,23 @@ func (p *Provider) DataSources(_ context.Context) []func() datasource.DataSource
postgresFlexAlphaUser.NewUserDataSource,
postgresflexalphaFlavors.NewFlavorsDataSource,
- // sqlserverFlexAlphaFlavor.NewFlavorDataSource,
- sqlserverflexalphaInstance.NewInstanceDataSource,
- sqlserverflexalphaUser.NewUserDataSource,
+ sqlserverflexalphaVersion.NewVersionDataSource,
+ sqlserverFlexAlphaFlavor.NewFlavorDataSource,
+ sqlServerFlexAlphaInstance.NewInstanceDataSource,
+ sqlserverFlexAlphaUser.NewUserDataSource,
sqlserverflexalphaDatabase.NewDatabaseDataSource,
-
- sqlserverFlexBetaDatabase.NewDatabaseDataSource,
- sqlserverflexBetaInstance.NewInstanceDataSource,
- sqlserverflexbetaUser.NewUserDataSource,
- // sqlserverFlexBetaFlavor.NewFlavorDataSource,
}
}
// Resources defines the resources implemented in the provider.
func (p *Provider) Resources(_ context.Context) []func() resource.Resource {
resources := []func() resource.Resource{
+ postgresFlexAlphaDatabase.NewDatabaseResource,
postgresFlexAlphaInstance.NewInstanceResource,
postgresFlexAlphaUser.NewUserResource,
- postgresFlexAlphaDatabase.NewDatabaseResource,
-
- sqlserverflexalphaInstance.NewInstanceResource,
- sqlserverflexalphaUser.NewUserResource,
+ sqlServerFlexAlphaInstance.NewInstanceResource,
+ sqlserverFlexAlphaUser.NewUserResource,
sqlserverflexalphaDatabase.NewDatabaseResource,
-
- sqlserverflexBetaInstance.NewInstanceResource,
- sqlserverflexbetaUser.NewUserResource,
- sqlserverFlexBetaDatabase.NewDatabaseResource,
}
return resources
}
diff --git a/stackit/provider_acc_test.go b/stackit/provider_acc_test.go
index 38e22144..cfd6095f 100644
--- a/stackit/provider_acc_test.go
+++ b/stackit/provider_acc_test.go
@@ -1,38 +1,20 @@
+// Copyright (c) STACKIT
+
package stackit_test
import (
- "context"
_ "embed"
+ "fmt"
"os"
- "reflect"
+ "path"
"regexp"
+ "runtime"
"testing"
- "github.com/google/go-cmp/cmp"
-
- sqlserverflexalphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database"
-
- //nolint:staticcheck // used for acceptance testing
- postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor"
-
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- tfResource "github.com/hashicorp/terraform-plugin-framework/resource"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
- postgresFlexAlphaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database"
- postgresflexalphaFlavors "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors"
- postgresFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
- postgresFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user"
- sqlserverFlexAlphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/instance"
- sqlserverFlexAlphaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user"
- sqlserverflexBetaDatabase "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/database"
- sqlserverFlexBetaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance"
- sqlserverFlexBetaUser "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user"
-
- "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
-
"github.com/hashicorp/terraform-plugin-testing/config"
- "github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing
+ "github.com/hashicorp/terraform-plugin-testing/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-testing/helper/resource"
+ "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/testutil"
)
//go:embed testdata/provider-credentials.tf
@@ -44,228 +26,236 @@ var providerInvalidAttribute string
//go:embed testdata/provider-all-attributes.tf
var providerValidAttributes string
-var testConfigProviderCredentials config.Variables
-
-func TestMain(m *testing.M) {
- testutils.Setup()
- code := m.Run()
- // shutdown()
- os.Exit(code)
+var testConfigProviderCredentials = config.Variables{
+ "project_id": config.StringVariable(testutil.ProjectId),
+ "name": config.StringVariable(fmt.Sprintf("tf-acc-prov%s", acctest.RandStringFromCharSet(3, acctest.CharSetAlphaNum))),
}
-func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
- expectedDataSources := []datasource.DataSource{
- postgresFlexAlphaFlavor.NewFlavorDataSource(),
- // postgresFlexAlphaFlavor.NewFlavorListDataSource,
- postgresFlexAlphaDatabase.NewDatabaseDataSource(),
- postgresFlexAlphaInstance.NewInstanceDataSource(),
- postgresFlexAlphaUser.NewUserDataSource(),
- postgresflexalphaFlavors.NewFlavorsDataSource(),
-
- // sqlserverFlexAlphaFlavor.NewFlavorDataSource(),
- sqlserverFlexAlphaInstance.NewInstanceDataSource(),
- sqlserverFlexAlphaUser.NewUserDataSource(),
- sqlserverflexalphaDatabase.NewDatabaseDataSource(),
-
- sqlserverflexBetaDatabase.NewDatabaseDataSource(),
- sqlserverFlexBetaInstance.NewInstanceDataSource(),
- sqlserverFlexBetaUser.NewUserDataSource(),
- // sqlserverFlexBetaFlavor.NewFlavorDataSource(),
+// Helper function to obtain the home directory on different systems.
+// Based on os.UserHomeDir().
+func getHomeEnvVariableName() string {
+ env := "HOME"
+ switch runtime.GOOS {
+ case "windows":
+ env = "USERPROFILE"
+ case "plan9":
+ env = "home"
}
- provider, ok := stackit.New("testing")().(*stackit.Provider)
- if !ok {
- t.Fatal("could not assert provider type")
- }
- datasources := provider.DataSources(context.Background())
+ return env
+}
- expectedMap := map[string]struct{}{}
- for _, d := range expectedDataSources {
- expectedMap[reflect.TypeOf(d).String()] = struct{}{}
+// create temporary home and initialize the credentials file as well
+func createTemporaryHome(createValidCredentialsFile bool, t *testing.T) string {
+ // create a temporary file
+ tempHome, err := os.MkdirTemp("", "tempHome")
+ if err != nil {
+ t.Fatalf("Failed to create temporary home directory: %v", err)
}
- actualMap := map[string]struct{}{}
- for _, d := range datasources {
- actualMap[reflect.TypeOf(d()).String()] = struct{}{}
+ // create credentials file in temp directory
+ stackitFolder := path.Join(tempHome, ".stackit")
+ if err := os.Mkdir(stackitFolder, 0o750); err != nil {
+ t.Fatalf("Failed to create stackit folder: %v", err)
}
- if diff := cmp.Diff(expectedMap, actualMap); diff != "" {
- t.Errorf("DataSources mismatch (-expected +actual):\n%s", diff)
+ filePath := path.Join(stackitFolder, "credentials.json")
+ file, err := os.Create(filePath)
+ if err != nil {
+ t.Fatalf("Failed to create credentials file: %v", err)
+ }
+ defer func() {
+ if err := file.Close(); err != nil {
+ t.Fatalf("Error while closing the file: %v", err)
+ }
+ }()
+
+ // Define content, default = invalid token
+ token := "foo_token"
+ if createValidCredentialsFile {
+ token = testutil.GetTestProjectServiceAccountToken("")
+ }
+ content := fmt.Sprintf(`
+ {
+ "STACKIT_SERVICE_ACCOUNT_TOKEN": "%s"
+ }`, token)
+
+ if _, err = file.WriteString(content); err != nil {
+ t.Fatalf("Error writing to file: %v", err)
+ }
+
+ return tempHome
+}
+
+// Function to overwrite the home folder
+func setTemporaryHome(tempHomePath string) {
+ env := getHomeEnvVariableName()
+ if err := os.Setenv(env, tempHomePath); err != nil {
+ fmt.Printf("Error setting temporary home directory %v", err)
}
}
-func TestUnitProviderHasChildResources_Basic(t *testing.T) {
- expectedResources := []tfResource.Resource{
- postgresFlexAlphaInstance.NewInstanceResource(),
- postgresFlexAlphaUser.NewUserResource(),
- postgresFlexAlphaDatabase.NewDatabaseResource(),
-
- sqlserverFlexAlphaInstance.NewInstanceResource(),
- sqlserverFlexAlphaUser.NewUserResource(),
- sqlserverflexalphaDatabase.NewDatabaseResource(),
-
- sqlserverFlexBetaInstance.NewInstanceResource(),
- sqlserverFlexBetaUser.NewUserResource(),
- sqlserverflexBetaDatabase.NewDatabaseResource(),
+// cleanup the temporary home and reset the environment variable
+func cleanupTemporaryHome(tempHomePath string, t *testing.T) {
+ if err := os.RemoveAll(tempHomePath); err != nil {
+ t.Fatalf("Error cleaning up temporary folder: %v", err)
}
- provider, ok := stackit.New("testing")().(*stackit.Provider)
- if !ok {
- t.Fatal("could not assert provider type")
+ originalHomeDir, err := os.UserHomeDir()
+ if err != nil {
+ t.Fatalf("Failed to restore home directory back to normal: %v", err)
}
- resources := provider.Resources(context.Background())
-
- expectedMap := map[string]struct{}{}
- for _, r := range expectedResources {
- expectedMap[reflect.TypeOf(r).String()] = struct{}{}
- }
-
- actualMap := map[string]struct{}{}
- for _, r := range resources {
- actualMap[reflect.TypeOf(r()).String()] = struct{}{}
- }
-
- if diff := cmp.Diff(expectedMap, actualMap); diff != "" {
- t.Errorf("Resources mismatch (-expected +actual):\n%s", diff)
+ // revert back to original home folder
+ env := getHomeEnvVariableName()
+ if err := os.Setenv(env, originalHomeDir); err != nil {
+ fmt.Printf("Error resetting temporary home directory %v", err)
}
}
-func TestAccEnvVarServiceAccountPathValid(t *testing.T) {
- t.Skip("needs refactoring")
+func getServiceAccountToken() (string, error) {
+ token, set := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
+ if !set || token == "" {
+ return "", fmt.Errorf("Token not set, please set TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN to a valid token to perform tests")
+ }
+ return token, nil
+}
+
+func TestAccEnvVarTokenValid(t *testing.T) {
// Check if acceptance tests should be run
if v := os.Getenv(resource.EnvTfAcc); v == "" {
t.Skipf(
"Acceptance tests skipped unless env '%s' set",
- resource.EnvTfAcc,
- )
+ resource.EnvTfAcc)
return
}
- tempHomeFolder := testutils.CreateTemporaryHome(true, t)
- defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
- resource.Test(
- t, resource.TestCase{
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- {
- PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
- ConfigVariables: testConfigProviderCredentials,
- Config: providerCredentialConfig,
- },
+ token, err := getServiceAccountToken()
+ if err != nil {
+ t.Fatalf("Can't get token: %v", err)
+ }
+
+ t.Setenv("STACKIT_CREDENTIALS_PATH", "")
+ t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", token)
+ tempHomeFolder := createTemporaryHome(false, t)
+ defer cleanupTemporaryHome(tempHomeFolder, t)
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() { setTemporaryHome(tempHomeFolder) },
+ ConfigVariables: testConfigProviderCredentials,
+ Config: providerCredentialConfig,
},
},
- )
+ })
}
-func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
- t.Skip("needs refactoring")
+func TestAccEnvVarTokenInvalid(t *testing.T) {
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
- tempHomeFolder := testutils.CreateTemporaryHome(false, t)
- defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
- resource.Test(
- t, resource.TestCase{
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- {
- PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
- ConfigVariables: testConfigProviderCredentials,
- Config: providerCredentialConfig,
- ExpectError: regexp.MustCompile(`undefined response type, status code 401`),
- },
+ t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", "foo")
+ tempHomeFolder := createTemporaryHome(false, t)
+ defer cleanupTemporaryHome(tempHomeFolder, t)
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() { setTemporaryHome(tempHomeFolder) },
+ ConfigVariables: testConfigProviderCredentials,
+ Config: providerCredentialConfig,
+ ExpectError: regexp.MustCompile(`undefined response type, status code 401`),
},
},
- )
+ })
}
func TestAccCredentialsFileValid(t *testing.T) {
- t.Skip("needs refactoring")
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
- tempHomeFolder := testutils.CreateTemporaryHome(true, t)
- defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
- resource.Test(
- t, resource.TestCase{
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- {
- PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
- ConfigVariables: testConfigProviderCredentials,
- Config: providerCredentialConfig,
- },
+ t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", "")
+ tempHomeFolder := createTemporaryHome(true, t)
+ defer cleanupTemporaryHome(tempHomeFolder, t)
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() { setTemporaryHome(tempHomeFolder) },
+ ConfigVariables: testConfigProviderCredentials,
+ Config: providerCredentialConfig,
},
},
- )
+ })
}
func TestAccCredentialsFileInvalid(t *testing.T) {
- t.Skip("needs refactoring")
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
- tempHomeFolder := testutils.CreateTemporaryHome(false, t)
- defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
- resource.Test(
- t, resource.TestCase{
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- {
- PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
- ConfigVariables: testConfigProviderCredentials,
- Config: providerCredentialConfig,
- ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`),
- },
+ t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", "")
+ tempHomeFolder := createTemporaryHome(false, t)
+ defer cleanupTemporaryHome(tempHomeFolder, t)
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ Steps: []resource.TestStep{
+ {
+ PreConfig: func() { setTemporaryHome(tempHomeFolder) },
+ ConfigVariables: testConfigProviderCredentials,
+ Config: providerCredentialConfig,
+ ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`),
},
},
- )
+ })
}
func TestAccProviderConfigureValidValues(t *testing.T) {
- t.Skip("needs refactoring")
// Check if acceptance tests should be run
if v := os.Getenv(resource.EnvTfAcc); v == "" {
t.Skipf(
"Acceptance tests skipped unless env '%s' set",
- resource.EnvTfAcc,
- )
+ resource.EnvTfAcc)
return
}
+ // use service account token for these tests
+ token, err := getServiceAccountToken()
+ if err != nil {
+ t.Fatalf("Can't get token: %v", err)
+ }
+
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
- tempHomeFolder := testutils.CreateTemporaryHome(true, t)
- defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
- resource.Test(
- t, resource.TestCase{
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- {
- // valid provider attributes
- ConfigVariables: testConfigProviderCredentials,
- Config: providerValidAttributes,
- },
+ t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", token)
+ tempHomeFolder := createTemporaryHome(true, t)
+ defer cleanupTemporaryHome(tempHomeFolder, t)
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ Steps: []resource.TestStep{
+ { // valid provider attributes
+ ConfigVariables: testConfigProviderCredentials,
+ Config: providerValidAttributes,
},
},
- )
+ })
}
func TestAccProviderConfigureAnInvalidValue(t *testing.T) {
- t.Skip("needs refactoring")
// Check if acceptance tests should be run
if v := os.Getenv(resource.EnvTfAcc); v == "" {
t.Skipf(
"Acceptance tests skipped unless env '%s' set",
- resource.EnvTfAcc,
- )
+ resource.EnvTfAcc)
return
}
+ // use service account token for these tests
+ token, err := getServiceAccountToken()
+ if err != nil {
+ t.Fatalf("Can't get token: %v", err)
+ }
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
- tempHomeFolder := testutils.CreateTemporaryHome(true, t)
- defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
- resource.Test(
- t, resource.TestCase{
- ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
- Steps: []resource.TestStep{
- {
- // invalid test attribute should throw an error
- ConfigVariables: testConfigProviderCredentials,
- Config: providerInvalidAttribute,
- ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`),
- },
+ t.Setenv("STACKIT_SERVICE_ACCOUNT_TOKEN", token)
+ tempHomeFolder := createTemporaryHome(true, t)
+ defer cleanupTemporaryHome(tempHomeFolder, t)
+ resource.Test(t, resource.TestCase{
+ ProtoV6ProviderFactories: testutil.TestAccProtoV6ProviderFactories,
+ Steps: []resource.TestStep{
+ { // invalid test attribute should throw an error
+ ConfigVariables: testConfigProviderCredentials,
+ Config: providerInvalidAttribute,
+ ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`),
},
},
- )
+ })
}
diff --git a/stackit/testdata/provider-all-attributes.tf b/stackit/testdata/provider-all-attributes.tf
index 9ec02936..930fc553 100644
--- a/stackit/testdata/provider-all-attributes.tf
+++ b/stackit/testdata/provider-all-attributes.tf
@@ -1,8 +1,8 @@
variable "project_id" {}
-variable "region" {}
+variable "name" {}
-provider "stackitprivatepreview" {
+provider "stackit" {
default_region = "eu01"
credentials_path = "~/.stackit/credentials.json"
service_account_token = ""
@@ -36,11 +36,7 @@ provider "stackitprivatepreview" {
enable_beta_resources = "true"
}
-data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 2
- ram = 4
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
+resource "stackit_network" "network" {
+ name = var.name
+ project_id = var.project_id
}
diff --git a/stackit/testdata/provider-credentials.tf b/stackit/testdata/provider-credentials.tf
index d348939e..a0ed79f4 100644
--- a/stackit/testdata/provider-credentials.tf
+++ b/stackit/testdata/provider-credentials.tf
@@ -1,18 +1,11 @@
variable "project_id" {}
-variable "region" {}
+variable "name" {}
-variable "service_account_key_path" {}
-
-provider "stackitprivatepreview" {
- service_account_key_path = var.service_account_key_path
+provider "stackit" {
}
-data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 2
- ram = 4
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
-}
+resource "stackit_network" "network" {
+ name = var.name
+ project_id = var.project_id
+}
\ No newline at end of file
diff --git a/stackit/testdata/provider-invalid-attribute.tf b/stackit/testdata/provider-invalid-attribute.tf
index 1c9d1729..524610e6 100644
--- a/stackit/testdata/provider-invalid-attribute.tf
+++ b/stackit/testdata/provider-invalid-attribute.tf
@@ -1,16 +1,12 @@
variable "project_id" {}
-variable "region" {}
+variable "name" {}
-provider "stackitprivatepreview" {
+provider "stackit" {
test = "test"
}
-data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
- project_id = var.project_id
- region = var.region
- cpu = 2
- ram = 4
- node_type = "Single"
- storage_class = "premium-perf2-stackit"
-}
+resource "stackit_network" "network" {
+ name = var.name
+ project_id = var.project_id
+}
\ No newline at end of file
diff --git a/tools/go.mod b/tools/go.mod
deleted file mode 100644
index fe55a2d8..00000000
--- a/tools/go.mod
+++ /dev/null
@@ -1,263 +0,0 @@
-module tools
-
-go 1.25.6
-
-require (
- github.com/golangci/golangci-lint/v2 v2.10.1
- github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1
- github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0
- github.com/hashicorp/terraform-plugin-docs v0.24.0
- golang.org/x/tools v0.42.0
-)
-
-require (
- 4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
- 4d63.com/gochecknoglobals v0.2.2 // indirect
- codeberg.org/chavacava/garif v0.2.0 // indirect
- codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
- dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect
- dev.gaijin.team/go/golib v0.6.0 // indirect
- github.com/4meepo/tagalign v1.4.3 // indirect
- github.com/Abirdcfly/dupword v0.1.7 // indirect
- github.com/AdminBenni/iota-mixing v1.0.0 // indirect
- github.com/AlwxSin/noinlineerr v1.0.5 // indirect
- github.com/Antonboom/errname v1.1.1 // indirect
- github.com/Antonboom/nilnil v1.1.1 // indirect
- github.com/Antonboom/testifylint v1.6.4 // indirect
- github.com/BurntSushi/toml v1.6.0 // indirect
- github.com/Djarvur/go-err113 v0.1.1 // indirect
- github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
- github.com/Masterminds/goutils v1.1.1 // indirect
- github.com/Masterminds/semver/v3 v3.4.0 // indirect
- github.com/Masterminds/sprig/v3 v3.2.3 // indirect
- github.com/MirrexOne/unqueryvet v1.5.3 // indirect
- github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
- github.com/ProtonMail/go-crypto v1.1.6 // indirect
- github.com/alecthomas/chroma/v2 v2.23.1 // indirect
- github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
- github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
- github.com/alexkohler/prealloc v1.0.2 // indirect
- github.com/alfatraining/structtag v1.0.0 // indirect
- github.com/alingse/asasalint v0.0.11 // indirect
- github.com/alingse/nilnesserr v0.2.0 // indirect
- github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
- github.com/armon/go-radix v1.0.0 // indirect
- github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
- github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
- github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
- github.com/bahlo/generic-list-go v0.2.0 // indirect
- github.com/beorn7/perks v1.0.1 // indirect
- github.com/bgentry/speakeasy v0.1.0 // indirect
- github.com/bkielbasa/cyclop v1.2.3 // indirect
- github.com/blizzy78/varnamelen v0.8.0 // indirect
- github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
- github.com/bombsimon/wsl/v4 v4.7.0 // indirect
- github.com/bombsimon/wsl/v5 v5.6.0 // indirect
- github.com/breml/bidichk v0.3.3 // indirect
- github.com/breml/errchkjson v0.4.1 // indirect
- github.com/buger/jsonparser v1.1.1 // indirect
- github.com/butuzov/ireturn v0.4.0 // indirect
- github.com/butuzov/mirror v1.3.0 // indirect
- github.com/catenacyber/perfsprint v0.10.1 // indirect
- github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
- github.com/cespare/xxhash/v2 v2.3.0 // indirect
- github.com/charithe/durationcheck v0.0.11 // indirect
- github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
- github.com/charmbracelet/lipgloss v1.1.0 // indirect
- github.com/charmbracelet/x/ansi v0.10.1 // indirect
- github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
- github.com/charmbracelet/x/term v0.2.1 // indirect
- github.com/ckaznocha/intrange v0.3.1 // indirect
- github.com/cloudflare/circl v1.6.1 // indirect
- github.com/curioswitch/go-reassign v0.3.0 // indirect
- github.com/daixiang0/gci v0.13.7 // indirect
- github.com/dave/dst v0.27.3 // indirect
- github.com/davecgh/go-spew v1.1.1 // indirect
- github.com/denis-tingaikin/go-header v0.5.0 // indirect
- github.com/dlclark/regexp2 v1.11.5 // indirect
- github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect
- github.com/ettle/strcase v0.2.0 // indirect
- github.com/fatih/color v1.18.0 // indirect
- github.com/fatih/structtag v1.2.0 // indirect
- github.com/firefart/nonamedreturns v1.0.6 // indirect
- github.com/fsnotify/fsnotify v1.5.4 // indirect
- github.com/fzipp/gocyclo v0.6.0 // indirect
- github.com/ghostiam/protogetter v0.3.20 // indirect
- github.com/go-critic/go-critic v0.14.3 // indirect
- github.com/go-toolsmith/astcast v1.1.0 // indirect
- github.com/go-toolsmith/astcopy v1.1.0 // indirect
- github.com/go-toolsmith/astequal v1.2.0 // indirect
- github.com/go-toolsmith/astfmt v1.1.0 // indirect
- github.com/go-toolsmith/astp v1.1.0 // indirect
- github.com/go-toolsmith/strparse v1.1.0 // indirect
- github.com/go-toolsmith/typep v1.1.0 // indirect
- github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
- github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
- github.com/gobwas/glob v0.2.3 // indirect
- github.com/godoc-lint/godoc-lint v0.11.2 // indirect
- github.com/gofrs/flock v0.13.0 // indirect
- github.com/golang/protobuf v1.5.3 // indirect
- github.com/golangci/asciicheck v0.5.0 // indirect
- github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
- github.com/golangci/go-printf-func-name v0.1.1 // indirect
- github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
- github.com/golangci/golines v0.15.0 // indirect
- github.com/golangci/misspell v0.8.0 // indirect
- github.com/golangci/plugin-module-register v0.1.2 // indirect
- github.com/golangci/revgrep v0.8.0 // indirect
- github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
- github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
- github.com/google/go-cmp v0.7.0 // indirect
- github.com/google/uuid v1.6.0 // indirect
- github.com/gordonklaus/ineffassign v0.2.0 // indirect
- github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
- github.com/gostaticanalysis/comment v1.5.0 // indirect
- github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
- github.com/gostaticanalysis/nilerr v0.1.2 // indirect
- github.com/hashicorp/cli v1.1.7 // indirect
- github.com/hashicorp/errwrap v1.1.0 // indirect
- github.com/hashicorp/go-checkpoint v0.5.0 // indirect
- github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
- github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
- github.com/hashicorp/go-multierror v1.1.1 // indirect
- github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
- github.com/hashicorp/go-uuid v1.0.3 // indirect
- github.com/hashicorp/go-version v1.8.0 // indirect
- github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
- github.com/hashicorp/hc-install v0.9.2 // indirect
- github.com/hashicorp/hcl v1.0.0 // indirect
- github.com/hashicorp/terraform-exec v0.24.0 // indirect
- github.com/hashicorp/terraform-json v0.27.2 // indirect
- github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect
- github.com/hexops/gotextdiff v1.0.3 // indirect
- github.com/huandu/xstrings v1.4.0 // indirect
- github.com/imdario/mergo v0.3.16 // indirect
- github.com/inconshreveable/mousetrap v1.1.0 // indirect
- github.com/jgautheron/goconst v1.8.2 // indirect
- github.com/jingyugao/rowserrcheck v1.1.1 // indirect
- github.com/jjti/go-spancheck v0.6.5 // indirect
- github.com/julz/importas v0.2.0 // indirect
- github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
- github.com/kisielk/errcheck v1.9.0 // indirect
- github.com/kkHAIKE/contextcheck v1.1.6 // indirect
- github.com/kulti/thelper v0.7.1 // indirect
- github.com/kunwardeep/paralleltest v1.0.15 // indirect
- github.com/lasiar/canonicalheader v1.1.2 // indirect
- github.com/ldez/exptostd v0.4.5 // indirect
- github.com/ldez/gomoddirectives v0.8.0 // indirect
- github.com/ldez/grignotin v0.10.1 // indirect
- github.com/ldez/structtags v0.6.1 // indirect
- github.com/ldez/tagliatelle v0.7.2 // indirect
- github.com/ldez/usetesting v0.5.0 // indirect
- github.com/leonklingele/grouper v1.1.2 // indirect
- github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
- github.com/macabu/inamedparam v0.2.0 // indirect
- github.com/magiconair/properties v1.8.6 // indirect
- github.com/mailru/easyjson v0.7.7 // indirect
- github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
- github.com/manuelarte/funcorder v0.5.0 // indirect
- github.com/maratori/testableexamples v1.0.1 // indirect
- github.com/maratori/testpackage v1.1.2 // indirect
- github.com/matoous/godox v1.1.0 // indirect
- github.com/mattn/go-colorable v0.1.14 // indirect
- github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mattn/go-runewidth v0.0.16 // indirect
- github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
- github.com/mgechev/revive v1.14.0 // indirect
- github.com/mitchellh/copystructure v1.2.0 // indirect
- github.com/mitchellh/go-homedir v1.1.0 // indirect
- github.com/mitchellh/mapstructure v1.5.0 // indirect
- github.com/mitchellh/reflectwalk v1.0.2 // indirect
- github.com/moricho/tparallel v0.3.2 // indirect
- github.com/muesli/termenv v0.16.0 // indirect
- github.com/nakabonne/nestif v0.3.1 // indirect
- github.com/nishanths/exhaustive v0.12.0 // indirect
- github.com/nishanths/predeclared v0.2.2 // indirect
- github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
- github.com/pb33f/libopenapi v0.15.0 // indirect
- github.com/pelletier/go-toml v1.9.5 // indirect
- github.com/pelletier/go-toml/v2 v2.2.4 // indirect
- github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/posener/complete v1.2.3 // indirect
- github.com/prometheus/client_golang v1.12.1 // indirect
- github.com/prometheus/client_model v0.2.0 // indirect
- github.com/prometheus/common v0.32.1 // indirect
- github.com/prometheus/procfs v0.7.3 // indirect
- github.com/quasilyte/go-ruleguard v0.4.5 // indirect
- github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
- github.com/quasilyte/gogrep v0.5.0 // indirect
- github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
- github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
- github.com/raeperd/recvcheck v0.2.0 // indirect
- github.com/rivo/uniseg v0.4.7 // indirect
- github.com/rogpeppe/go-internal v1.14.1 // indirect
- github.com/ryancurrah/gomodguard v1.4.1 // indirect
- github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
- github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
- github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
- github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
- github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
- github.com/securego/gosec/v2 v2.23.0 // indirect
- github.com/shopspring/decimal v1.3.1 // indirect
- github.com/sirupsen/logrus v1.9.4 // indirect
- github.com/sivchari/containedctx v1.0.3 // indirect
- github.com/sonatard/noctx v0.4.0 // indirect
- github.com/sourcegraph/go-diff v0.7.0 // indirect
- github.com/spf13/afero v1.15.0 // indirect
- github.com/spf13/cast v1.5.1 // indirect
- github.com/spf13/cobra v1.10.2 // indirect
- github.com/spf13/jwalterweatherman v1.1.0 // indirect
- github.com/spf13/pflag v1.0.10 // indirect
- github.com/spf13/viper v1.12.0 // indirect
- github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
- github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
- github.com/stretchr/objx v0.5.2 // indirect
- github.com/stretchr/testify v1.11.1 // indirect
- github.com/subosito/gotenv v1.4.1 // indirect
- github.com/tetafro/godot v1.5.4 // indirect
- github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
- github.com/timonwong/loggercheck v0.11.0 // indirect
- github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
- github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
- github.com/ultraware/funlen v0.2.0 // indirect
- github.com/ultraware/whitespace v0.2.0 // indirect
- github.com/uudashr/gocognit v1.2.0 // indirect
- github.com/uudashr/iface v1.4.1 // indirect
- github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
- github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
- github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
- github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
- github.com/xeipuuv/gojsonschema v1.2.0 // indirect
- github.com/xen0n/gosmopolitan v1.3.0 // indirect
- github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
- github.com/yagipy/maintidx v1.0.0 // indirect
- github.com/yeya24/promlinter v0.3.0 // indirect
- github.com/ykadowak/zerologlint v0.1.5 // indirect
- github.com/yuin/goldmark v1.7.7 // indirect
- github.com/yuin/goldmark-meta v1.1.0 // indirect
- github.com/zclconf/go-cty v1.17.0 // indirect
- gitlab.com/bosi/decorder v0.4.2 // indirect
- go-simpler.org/musttag v0.14.0 // indirect
- go-simpler.org/sloglint v0.11.1 // indirect
- go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
- go.augendre.info/arangolint v0.4.0 // indirect
- go.augendre.info/fatcontext v0.9.0 // indirect
- go.uber.org/multierr v1.10.0 // indirect
- go.uber.org/zap v1.27.0 // indirect
- go.yaml.in/yaml/v3 v3.0.4 // indirect
- golang.org/x/crypto v0.48.0 // indirect
- golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
- golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
- golang.org/x/mod v0.33.0 // indirect
- golang.org/x/sync v0.19.0 // indirect
- golang.org/x/sys v0.41.0 // indirect
- golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 // indirect
- golang.org/x/text v0.34.0 // indirect
- google.golang.org/protobuf v1.36.8 // indirect
- gopkg.in/ini.v1 v1.67.0 // indirect
- gopkg.in/yaml.v2 v2.4.0 // indirect
- gopkg.in/yaml.v3 v3.0.1 // indirect
- honnef.co/go/tools v0.7.0 // indirect
- mvdan.cc/gofumpt v0.9.2 // indirect
- mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
-)
diff --git a/tools/tools.go b/tools/tools.go
index e9567c7f..7023ef96 100644
--- a/tools/tools.go
+++ b/tools/tools.go
@@ -1,7 +1,9 @@
-//go:build tools
-
package tools
+// Generate copyright headers
+// nolint:misspell // copywrite is correct here
+//go:generate go run github.com/hashicorp/copywrite headers -d .. --config ../.copywrite.hcl
+
// Format Terraform code for use in documentation.
// If you do not have Terraform installed, you can remove the formatting command, but it is suggested
// to ensure the documentation is formatted properly.
@@ -9,11 +11,3 @@ package tools
// Generate documentation.
//go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-dir .. -provider-name stackitprivatepreview
-
-import (
- _ "github.com/golangci/golangci-lint/v2/cmd/golangci-lint"
- _ "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework"
- _ "github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi"
- _ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs"
- _ "golang.org/x/tools/cmd/goimports"
-)