Compare commits

..

No commits in common. "alpha" and "v0.0.10-alpha" have entirely different histories.

176 changed files with 8261 additions and 11353 deletions

View file

@ -2,11 +2,6 @@ name: Acceptance Testing
description: "Acceptance Testing pipeline" description: "Acceptance Testing pipeline"
inputs: inputs:
test_timeout_string:
description: "string that determines the timeout (default: 45m)"
default: '45m'
required: true
go-version: go-version:
description: "go version to install" description: "go version to install"
default: '1.25' default: '1.25'
@ -16,78 +11,38 @@ inputs:
description: "STACKIT project ID for tests" description: "STACKIT project ID for tests"
required: true required: true
project_user_email:
required: true
description: "project user email for acc testing"
tf_acc_kek_key_id:
description: "KEK key ID"
required: true
tf_acc_kek_key_ring_id:
description: "KEK key ring ID"
required: true
tf_acc_kek_key_version:
description: "KEK key version"
required: true
tf_acc_kek_service_account:
description: "KEK service account email"
required: true
region: region:
description: "STACKIT region for tests" description: "STACKIT region for tests"
default: 'eu01' default: 'eu01'
required: true required: true
service_account_json_content: service_account_json:
description: "STACKIT service account JSON file contents" description: "STACKIT service account JSON file contents"
required: true required: true
default: ""
service_account_json_content_b64:
description: "STACKIT service account JSON file contents"
required: true
default: ""
service_account_json_file_path:
description: "STACKIT service account JSON file contents"
required: true
default: 'service_account.json'
test_file: test_file:
description: "testfile to run" description: "testfile to run"
default: '' default: ''
outputs:
#outputs: random-number:
# random-number: description: "Random number"
# description: "Random number" value: ${{ steps.random-number-generator.outputs.random-number }}
# value: ${{ steps.random-number-generator.outputs.random-number }}
runs: runs:
using: "composite" using: "composite"
steps: steps:
# - name: Random Number Generator - name: Random Number Generator
# id: random-number-generator id: random-number-generator
# run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
# shell: bash shell: bash
- name: Install needed tools - name: Install needed tools
shell: bash shell: bash
run: | run: |
echo "::group::apt install"
set -e set -e
apt-get -y -qq update >apt_update.log 2>apt_update_err.log apt-get -y -qq update
if [ $? -ne 0 ]; then apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
cat apt_update.log apt_update_err.log
fi
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget >apt_get.log 2>apt_get_err.log
if [ $? -ne 0 ]; then
cat apt_get.log apt_get_err.log
fi
echo "::endgroup::"
- name: Setup JAVA - name: Setup JAVA
uses: actions/setup-java@v5 uses: actions/setup-java@v5
@ -98,165 +53,62 @@ runs:
- name: Install Go ${{ inputs.go-version }} - name: Install Go ${{ inputs.go-version }}
uses: actions/setup-go@v6 uses: actions/setup-go@v6
with: with:
# go-version: ${{ inputs.go-version }} go-version: ${{ inputs.go-version }}
check-latest: true check-latest: true
go-version-file: 'go.mod' go-version-file: 'go.mod'
- name: Determine GOMODCACHE
shell: bash
id: goenv
run: |
set -e
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
- name: Restore cached GO pkg
id: cache-gopkg
uses: actions/cache/restore@v5
with:
path: "${{ steps.goenv.outputs.gomodcache }}"
key: ${{ runner.os }}-gopkg
- name: Install go tools - name: Install go tools
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash shell: bash
run: | run: |
echo "::group::go install"
set -e set -e
go mod download go mod download
go install golang.org/x/tools/cmd/goimports@latest go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
echo "::endgroup::"
- name: Run go mod tidy
shell: bash
run: go mod tidy
- name: Save GO package Cache - name: Prepare pkg_gen directory
id: cache-gopkg-save
uses: actions/cache/save@v5
with:
path: |
${{ steps.goenv.outputs.gomodcache }}
key: ${{ runner.os }}-gopkg
- name: Creating service_account file from json input
if: inputs.service_account_json_content != ''
shell: bash shell: bash
run: | run: |
echo "::group::create service account file" go run cmd/main.go build -p
set -e
set -o pipefail
jsonFile="${{ inputs.service_account_json_file_path }}"
jsonFile="${jsonFile:-x}"
if [ "${jsonFile}" == "x" ]; then
echo "no service account file path provided"
exit 1
fi
if [ ! -f "${jsonFile}" ]; then
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
echo "${{ inputs.service_account_json_content }}" > stackit/"${{ inputs.service_account_json_file_path }}"
fi
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
echo "::endgroup::"
- name: Creating service_account file from base64 json input
if: inputs.service_account_json_content_b64 != ''
shell: bash
run: |
echo "::group::create service account file"
set -e
set -o pipefail
jsonFile="${{ inputs.service_account_json_file_path }}"
jsonFile="${jsonFile:-x}"
if [ "${jsonFile}" == "x" ]; then
echo "no service account file path provided"
exit 1
fi
if [ ! -f "${jsonFile}" ]; then
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
echo "${{ inputs.service_account_json_content_b64 }}" | base64 -d > stackit/"${{ inputs.service_account_json_file_path }}"
fi
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
echo "::endgroup::"
- name: Run acceptance test file - name: Run acceptance test file
if: ${{ inputs.test_file != '' }} if: ${{ inputs.test_file != '' }}
shell: bash shell: bash
run: | run: |
echo "::group::go test file"
set -e
set -o pipefail
echo "Running acceptance tests for the terraform provider" echo "Running acceptance tests for the terraform provider"
cd stackit || exit 1 echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
cd stackit
TF_ACC=1 \ TF_ACC=1 \
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \ TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
TF_ACC_REGION=${TF_ACC_REGION} \ TF_ACC_REGION=${TF_ACC_REGION} \
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \ go test ${{ inputs.test_file }} -count=1 -timeout=30m
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
go test ${{ inputs.test_file }} -count=1 -timeout=${{ inputs.test_timeout_string }}
echo "::endgroup::"
env: env:
TF_ACC_PROJECT_ID: ${{ inputs.project_id }} STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
TF_PROJECT_ID: ${{ inputs.project_id }}
TF_ACC_REGION: ${{ inputs.region }} TF_ACC_REGION: ${{ inputs.region }}
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }} # TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }} # TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }} # TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
# - name: Run test action
# if: ${{ inputs.test_file == '' }}
# env:
# TF_ACC: 1
# TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
# TF_ACC_REGION: ${{ inputs.region }}
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
# TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
# TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
# TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
# TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
# TF_ACC_SERVICE_ACCOUNT_FILE: "${PWD}/${{ inputs.service_account_json_file_path }}"
# uses: robherley/go-test-action@v0.1.0
# with:
# testArguments: "./... -timeout 45m"
- name: Run acceptance tests - name: Run acceptance tests
if: ${{ inputs.test_file == '' }} if: ${{ inputs.test_file == '' }}
shell: bash shell: bash
run: | run: |
echo "::group::go test all"
set -e
set -o pipefail
echo "Running acceptance tests for the terraform provider" echo "Running acceptance tests for the terraform provider"
cd stackit || exit 1 echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
cd stackit
TF_ACC=1 \ TF_ACC=1 \
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \ TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
TF_ACC_REGION=${TF_ACC_REGION} \ TF_ACC_REGION=${TF_ACC_REGION} \
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \ go test ./... -count=1 -timeout=30m
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
go test ./... -count=1 -timeout=${{ inputs.test_timeout_string }}
echo "::endgroup::"
env: env:
TF_ACC_PROJECT_ID: ${{ inputs.project_id }} STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
TF_PROJECT_ID: ${{ inputs.project_id }}
TF_ACC_REGION: ${{ inputs.region }} TF_ACC_REGION: ${{ inputs.region }}
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }} # TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }} # TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }} # TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }} # TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}

View file

@ -20,63 +20,25 @@ runs:
run: | run: |
set -e set -e
apt-get -y -qq update apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- name: Checkout
uses: actions/checkout@v6
- name: Install Go ${{ inputs.go-version }} - name: Install Go ${{ inputs.go-version }}
uses: actions/setup-go@v6 uses: actions/setup-go@v6
with: with:
# go-version: ${{ inputs.go-version }} go-version: ${{ inputs.go-version }}
check-latest: true check-latest: true
go-version-file: 'go.mod' go-version-file: 'go.mod'
- name: Determine GOMODCACHE
shell: bash
id: goenv
run: |
set -e
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
- name: Restore cached GO pkg
id: cache-gopkg
uses: actions/cache/restore@v5
with:
path: "${{ steps.goenv.outputs.gomodcache }}"
key: ${{ runner.os }}-gopkg
- name: Install go tools - name: Install go tools
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash shell: bash
run: | run: |
set -e set -e
go install golang.org/x/tools/cmd/goimports@latest go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
# - name: Run build pkg directory
# shell: bash
# run: |
# set -e
# go run generator/main.go build
- name: Get all go packages
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash
run: |
set -e
go get ./...
- name: Save Cache
id: cache-gopkg-save
uses: actions/cache/save@v5
with:
path: |
${{ steps.goenv.outputs.gomodcache }}
key: ${{ runner.os }}-gopkg
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }} - name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
uses: actions/setup-java@v5 uses: actions/setup-java@v5
@ -84,6 +46,16 @@ runs:
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
java-version: ${{ inputs.java-version }} java-version: ${{ inputs.java-version }}
- name: Checkout
uses: actions/checkout@v6
- name: Run build pkg directory
shell: bash
run: |
set -e
go run cmd/main.go build
- name: Run make to build app - name: Run make to build app
shell: bash shell: bash
run: | run: |

View file

@ -26,9 +26,9 @@ runs:
uses: https://code.forgejo.org/actions/setup-go@v6 uses: https://code.forgejo.org/actions/setup-go@v6
id: go-version id: go-version
with: with:
# go-version: ${{ inputs.go-version }} go-version: ${{ inputs.go-version }}
check-latest: true # Always check for the latest patch release check-latest: true # Always check for the latest patch release
go-version-file: "go.mod" # go-version-file: "go.mod"
# do not cache dependencies, we do this manually # do not cache dependencies, we do this manually
cache: false cache: false

View file

@ -22,39 +22,6 @@ env:
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage" CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
jobs: jobs:
runner_test:
name: "Test STACKIT runner"
runs-on: stackit-docker
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- name: Setup Go
uses: actions/setup-go@v6
with:
go-version: ${{ env.GO_VERSION }}
- name: Install go tools
run: |
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- name: Setup JAVA
uses: actions/setup-java@v5
with:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21'
- name: Checkout
uses: actions/checkout@v6
- name: Run build pkg directory
run: |
go run cmd/main.go build
publish_test: publish_test:
name: "Test readiness for publishing provider" name: "Test readiness for publishing provider"
needs: config needs: config
@ -137,67 +104,11 @@ jobs:
--gpgPubKeyFile=public_key.pem \ --gpgPubKeyFile=public_key.pem \
--version=${VERSION} --version=${VERSION}
testing:
name: CI run tests
runs-on: ubuntu-latest
needs: config
env:
TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Build
uses: ./.github/actions/build
with:
go-version: ${{ env.GO_VERSION }}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: Create service account json file
if: ${{ github.event_name == 'pull_request' }}
run: |
echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/service_account.json
- name: Run go mod tidy
if: ${{ github.event_name == 'pull_request' }}
run: go mod tidy
- name: Testing
run: make test
- name: Acceptance Testing
env:
TF_ACC: "1"
if: ${{ github.event_name == 'pull_request' }}
run: make test-acceptance-tf
- name: Check coverage threshold
shell: bash
run: |
make coverage
COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
echo "Coverage: $COVERAGE%"
if (( $(echo "$COVERAGE < 80" | bc -l) )); then
echo "Coverage is below 80%"
# exit 1
fi
- name: Archive code coverage results
uses: actions/upload-artifact@v4
with:
name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
main: main:
if: ${{ github.event_name != 'schedule' }} if: ${{ github.event_name != 'schedule' }}
name: CI run build and linting name: CI
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: config needs: config
steps: steps:
@ -226,37 +137,31 @@ jobs:
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v9 uses: golangci/golangci-lint-action@v9
with: with:
version: v2.9 version: v2.7
args: --config=golang-ci.yaml --allow-parallel-runners --timeout=5m args: --config=golang-ci.yaml --allow-parallel-runners --timeout=5m
continue-on-error: true
- name: Linting - name: Lint
run: make lint run: make lint
continue-on-error: true
# - name: Testing - name: Test
# run: make test run: make test
#
# - name: Acceptance Testing
# if: ${{ github.event_name == 'pull_request' }}
# run: make test-acceptance-tf
#
# - name: Check coverage threshold
# shell: bash
# run: |
# make coverage
# COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
# echo "Coverage: $COVERAGE%"
# if (( $(echo "$COVERAGE < 80" | bc -l) )); then
# echo "Coverage is below 80%"
# # exit 1
# fi
# - name: Archive code coverage results - name: Check coverage threshold
# uses: actions/upload-artifact@v4 shell: bash
# with: run: |
# name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }} make coverage
# path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}" COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
echo "Coverage: $COVERAGE%"
if (( $(echo "$COVERAGE < 80" | bc -l) )); then
echo "Coverage is below 80%"
# exit 1
fi
- name: Archive code coverage results
uses: actions/upload-artifact@v4
with:
name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
config: config:
if: ${{ github.event_name != 'schedule' }} if: ${{ github.event_name != 'schedule' }}

View file

@ -1,343 +0,0 @@
name: CI Workflow
on:
pull_request:
branches:
- alpha
- main
workflow_dispatch:
schedule:
# every sunday at 00:00
# - cron: '0 0 * * 0'
# every day at 00:00
- cron: '0 0 * * *'
push:
branches:
- '!main'
- '!alpha'
paths:
- '!.github'
env:
GO_VERSION: "1.25"
CODE_COVERAGE_FILE_NAME: "coverage.out" # must be the same as in Makefile
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
jobs:
config:
if: ${{ github.event_name != 'schedule' }}
name: Check GoReleaser config
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Check GoReleaser
uses: goreleaser/goreleaser-action@v7
with:
args: check
prepare:
name: Prepare GO cache
runs-on: ubuntu-latest
permissions:
actions: read # Required to identify workflow run.
checks: write # Required to add status summary.
contents: read # Required to checkout repository.
pull-requests: write # Required to add PR comment.
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Install Go ${{ inputs.go-version }}
id: go-install
uses: actions/setup-go@v6
with:
# go-version: ${{ inputs.go-version }}
check-latest: true
go-version-file: 'go.mod'
- name: Determine GOMODCACHE
shell: bash
id: goenv
run: |
set -e
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
- name: Restore cached GO pkg
id: cache-gopkg
uses: actions/cache/restore@v5
with:
path: "${{ steps.goenv.outputs.gomodcache }}"
key: ${{ runner.os }}-gopkg
- name: Install go tools
if: steps.cache-gopkg.outputs.cache-hit != 'true'
run: |
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- name: Get all go packages
if: steps.cache-gopkg.outputs.cache-hit != 'true'
shell: bash
run: |
set -e
go get ./...
- name: Save Cache
if: steps.cache-gopkg.outputs.cache-hit != 'true'
id: cache-gopkg-save
uses: actions/cache/save@v5
with:
path: |
${{ steps.goenv.outputs.gomodcache }}
key: ${{ runner.os }}-gopkg
publish_test:
name: "Test readiness for publishing provider"
needs:
- config
- prepare
runs-on: ubuntu-latest
permissions:
actions: read # Required to identify workflow run.
checks: write # Required to add status summary.
contents: read # Required to checkout repository.
pull-requests: write # Required to add PR comment.
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
- name: Checkout
uses: actions/checkout@v6
- name: Setup Go
uses: actions/setup-go@v6
with:
# go-version: ${{ env.GO_VERSION }}
check-latest: true
go-version-file: 'go.mod'
- name: Install go tools
run: |
go install golang.org/x/tools/cmd/goimports@latest
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
- name: Setup JAVA
uses: actions/setup-java@v5
with:
distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21'
# - name: Run build pkg directory
# run: |
# go run generator/main.go build
- name: Set up s3cfg
run: |
cat <<'EOF' >> ~/.s3cfg
[default]
host_base = https://object.storage.eu01.onstackit.cloud
host_bucket = https://%(bucket).object.storage.eu01.onstackit.cloud
check_ssl_certificate = False
access_key = ${{ secrets.S3_ACCESS_KEY }}
secret_key = ${{ secrets.S3_SECRET_KEY }}
EOF
- name: Import GPG key
run: |
echo "${{ secrets.PRIVATE_KEY_PEM }}" > ~/private.key.pem
gpg --import ~/private.key.pem
rm ~/private.key.pem
- name: Run GoReleaser with SNAPSHOT
id: goreleaser
env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
uses: goreleaser/goreleaser-action@v7
with:
args: release --skip publish --clean --snapshot
- name: Prepare key file
run: |
echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
- name: Prepare provider directory structure
run: |
VERSION=$(jq -r .version < dist/metadata.json)
go run generator/main.go \
publish \
--namespace=mhenselin \
--providerName=stackitprivatepreview \
--repoName=terraform-provider-stackitprivatepreview \
--domain=tfregistry.sysops.stackit.rocks \
--gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
--gpgPubKeyFile=public_key.pem \
--version=${VERSION}
testing:
name: CI run tests
runs-on: ubuntu-latest
needs:
- config
- prepare
env:
TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
TF_ACC_ORGANIZATION_ID: ${{ vars.TF_ACC_ORGANIZATION_ID }}
TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Build
uses: ./.github/actions/build
with:
go-version: ${{ env.GO_VERSION }}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: Create service account json file
if: ${{ github.event_name == 'pull_request' }}
run: |
echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/.service_account.json
- name: Run go mod tidy
if: ${{ github.event_name == 'pull_request' }}
run: go mod tidy
- name: Testing
run: |
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
export TF_ACC_SERVICE_ACCOUNT_FILE
make test
# - name: Acceptance Testing
# env:
# TF_ACC: "1"
# if: ${{ github.event_name == 'pull_request' }}
# run: |
# TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
# export TF_ACC_SERVICE_ACCOUNT_FILE
# make test-acceptance-tf
- name: Run Test
if: ${{ github.event_name == 'pull_request' }}
uses: ./.github/actions/acc_test
with:
go-version: ${{ env.GO_VERSION }}
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
region: ${{ vars.TF_ACC_REGION }}
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
# service_account_json_file_path: "~/service_account.json"
- name: Check coverage threshold
shell: bash
run: |
make coverage
COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
echo "Coverage: $COVERAGE%"
if (( $(echo "$COVERAGE < 80" | bc -l) )); then
echo "Coverage is below 80%"
# exit 1
fi
- name: Archive code coverage results
uses: actions/upload-artifact@v4
with:
name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
main:
if: ${{ github.event_name != 'schedule' }}
name: CI run build and linting
runs-on: ubuntu-latest
needs:
- config
- prepare
steps:
- name: Checkout
uses: actions/checkout@v6
# - uses: actions/cache@v5
# id: cache
# with:
# path: path/to/dependencies
# key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
# - name: Install Dependencies
# if: steps.cache.outputs.cache-hit != 'true'
# run: /install.sh
- name: Build
uses: ./.github/actions/build
with:
go-version: ${{ env.GO_VERSION }}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: "Ensure docs are up-to-date"
if: ${{ github.event_name == 'pull_request' }}
run: ./scripts/check-docs.sh
continue-on-error: true
- name: "Run go mod tidy"
if: ${{ github.event_name == 'pull_request' }}
run: go mod tidy
- name: golangci-lint
uses: golangci/golangci-lint-action@v9
with:
version: v2.10
args: --config=.golang-ci.yaml --allow-parallel-runners --timeout=5m
continue-on-error: true
- name: Linting terraform files
run: make lint-tf
continue-on-error: true
code_coverage:
name: "Code coverage report"
if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
runs-on: ubuntu-latest
needs:
- main
- prepare
permissions:
contents: read
actions: read # to download code coverage results from "main" job
pull-requests: write # write permission needed to comment on PR
steps:
- name: Install needed tools
shell: bash
run: |
set -e
apt-get -y -qq update
apt-get -y -qq install sudo
- name: Check new code coverage
uses: fgrosse/go-coverage-report@v1.2.0
continue-on-error: true # Add this line to prevent pipeline failures in forks
with:
coverage-artifact-name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
coverage-file-name: ${{ env.CODE_COVERAGE_FILE_NAME }}
root-package: 'github.com/stackitcloud/terraform-provider-stackit'

View file

@ -23,7 +23,7 @@ jobs:
uses: actions/checkout@v6 uses: actions/checkout@v6
- name: Check GoReleaser - name: Check GoReleaser
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: check args: check
@ -43,15 +43,10 @@ jobs:
apt-get -y -qq update apt-get -y -qq update
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
- name: Checkout
uses: actions/checkout@v6
- name: Setup Go - name: Setup Go
uses: actions/setup-go@v6 uses: actions/setup-go@v6
with: with:
# go-version: ${{ env.GO_VERSION }} go-version: ${{ env.GO_VERSION }}
check-latest: true
go-version-file: 'go.mod'
- name: Install go tools - name: Install go tools
run: | run: |
@ -65,6 +60,16 @@ jobs:
distribution: 'temurin' # See 'Supported distributions' for available options distribution: 'temurin' # See 'Supported distributions' for available options
java-version: '21' java-version: '21'
- name: Checkout
uses: actions/checkout@v6
- name: Run build pkg directory
run: |
set -e
mkdir -p generated/services
mkdir -p generated/internal/services
go run cmd/main.go build
- name: Set up s3cfg - name: Set up s3cfg
run: | run: |
cat <<'EOF' >> ~/.s3cfg cat <<'EOF' >> ~/.s3cfg
@ -88,7 +93,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }} GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }} GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: release --skip publish --clean --snapshot args: release --skip publish --clean --snapshot
@ -98,7 +103,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }} GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }} GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: release --skip publish --clean args: release --skip publish --clean
@ -109,7 +114,7 @@ jobs:
- name: Prepare provider directory structure - name: Prepare provider directory structure
run: | run: |
VERSION=$(jq -r .version < dist/metadata.json) VERSION=$(jq -r .version < dist/metadata.json)
go run generator/main.go \ go run cmd/main.go \
publish \ publish \
--namespace=mhenselin \ --namespace=mhenselin \
--providerName=stackitprivatepreview \ --providerName=stackitprivatepreview \

View file

@ -22,19 +22,17 @@ jobs:
with: with:
# Allow goreleaser to access older tag information. # Allow goreleaser to access older tag information.
fetch-depth: 0 fetch-depth: 0
- uses: actions/setup-go@v5
- uses: https://code.forgejo.org/actions/setup-go@v6
with: with:
go-version-file: "go.mod" go-version-file: "go.mod"
cache: true cache: true
- name: Import GPG key - name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6 uses: crazy-max/ghaction-import-gpg@v6
id: import_gpg id: import_gpg
with: with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
- name: Run GoReleaser - name: Run GoReleaser
uses: goreleaser/goreleaser-action@v7 uses: goreleaser/goreleaser-action@v6
with: with:
args: release --clean args: release --clean
env: env:

View file

@ -1,29 +0,0 @@
name: Runner stats
on:
workflow_dispatch:
jobs:
stats-own:
name: "Get own runner stats"
runs-on: ubuntu-latest
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install inxi
- name: Show stats
run: inxi -c 0
stats-stackit:
name: "Get STACKIT runner stats"
runs-on: stackit-docker
steps:
- name: Install needed tools
run: |
apt-get -y -qq update
apt-get -y -qq install inxi
- name: Show stats
run: inxi -c 0

View file

@ -18,12 +18,6 @@ jobs:
uses: ./.github/actions/acc_test uses: ./.github/actions/acc_test
with: with:
go-version: ${{ env.GO_VERSION }} go-version: ${{ env.GO_VERSION }}
project_id: ${{ vars.TF_ACC_PROJECT_ID }} project_id: ${{ vars.TEST_PROJECT_ID }}
region: 'eu01' region: 'eu01'
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}" service_account_json: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
# service_account_json_file_path: "~/service_account.json"

3
.gitignore vendored
View file

@ -40,12 +40,9 @@ coverage.out
coverage.html coverage.html
generated generated
stackit-sdk-generator stackit-sdk-generator
stackit-sdk-generator/**
dist dist
.secrets .secrets
pkg_gen pkg_gen
/release/ /release/
.env
**/.env

View file

@ -12,20 +12,17 @@ project-tools:
# LINT # LINT
lint-golangci-lint: lint-golangci-lint:
@echo "Linting with golangci-lint" @echo "Linting with golangci-lint"
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config .golang-ci.yaml @$(SCRIPTS_BASE)/lint-golangci-lint.sh
lint-tf: lint-tf:
@echo "Linting terraform files" @echo "Linting terraform files"
@terraform fmt -check -diff -recursive examples/ @terraform fmt -check -diff -recursive
@terraform fmt -check -diff -recursive stackit/
lint: lint-golangci-lint lint-tf lint: lint-golangci-lint lint-tf
# DOCUMENTATION GENERATION # DOCUMENTATION GENERATION
generate-docs: generate-docs:
@echo "Generating documentation with tfplugindocs" @echo "Generating documentation with tfplugindocs"
@$(SCRIPTS_BASE)/tfplugindocs.sh @$(SCRIPTS_BASE)/tfplugindocs.sh
build: build:
@ -40,12 +37,12 @@ fmt:
.PHONY: test coverage .PHONY: test coverage
test: test:
@echo "Running tests for the terraform provider" @echo "Running tests for the terraform provider"
@cd $(ROOT_DIR)/stackit && go test -timeout 0 ./... -count=1 -coverprofile=../coverage.out && cd $(ROOT_DIR) @cd $(ROOT_DIR)/stackit && go test ./... -count=1 -coverprofile=../coverage.out && cd $(ROOT_DIR)
# Test coverage # Test coverage
coverage: coverage:
@echo ">> Creating test coverage report for the terraform provider" @echo ">> Creating test coverage report for the terraform provider"
@cd $(ROOT_DIR)/stackit && (go test -timeout 0 ./... -count=1 -coverprofile=../coverage.out || true) && cd $(ROOT_DIR) @cd $(ROOT_DIR)/stackit && (go test ./... -count=1 -coverprofile=../coverage.out || true) && cd $(ROOT_DIR)
@cd $(ROOT_DIR)/stackit && go tool cover -html=../coverage.out -o ../coverage.html && cd $(ROOT_DIR) @cd $(ROOT_DIR)/stackit && go tool cover -html=../coverage.out -o ../coverage.html && cd $(ROOT_DIR)
test-acceptance-tf: test-acceptance-tf:

956
cmd/cmd/build/build.go Normal file
View file

@ -0,0 +1,956 @@
package build
import (
"bufio"
"bytes"
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
"io"
"log"
"log/slog"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"strconv"
"strings"
"text/template"
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
"github.com/ldez/go-git-cmd-wrapper/v2/git"
)
// Repository names and clone URLs for the two upstream repositories the
// builder depends on: the public STACKIT API specifications and the SDK
// generator that turns them into Go packages.
const (
	// OAS_REPO_NAME is the directory name the API-spec repo is cloned into.
	OAS_REPO_NAME = "stackit-api-specifications"
	// OAS_REPO is the clone URL of the STACKIT OpenAPI specifications.
	OAS_REPO = "https://github.com/stackitcloud/stackit-api-specifications.git"
	// GEN_REPO_NAME is the directory name the generator repo is cloned into.
	GEN_REPO_NAME = "stackit-sdk-generator"
	// GEN_REPO is the clone URL of the STACKIT SDK generator.
	GEN_REPO = "https://github.com/stackitcloud/stackit-sdk-generator.git"
)
// version describes a parsed API version directory name such as
// "v1alpha2": verString is the maturity suffix, major the leading
// number, minor the optional trailing number (0 when absent).
type version struct {
	verString string // maturity suffix, e.g. "alpha" or "beta"
	major     int    // leading major number, e.g. 1 in "v1alpha2"
	minor     int    // trailing minor number, 0 when not present
}
// Builder holds the feature switches for a Build() run.
type Builder struct {
	SkipClone    bool // reuse already-cloned repos instead of cloning again
	SkipCleanup  bool // keep temporary directories after the run
	PackagesOnly bool // stop after generating pkg_gen; skip scaffolding
}
// Build runs the end-to-end SDK/provider generation pipeline:
//  1. locate the git repo root,
//  2. (unless PackagesOnly) verify the tfplugingen-* tools exist,
//  3. (unless SkipCleanup) wipe the old pkg_gen directory,
//  4. clone the SDK generator and the API-spec repo (unless SkipClone),
//  5. pick the latest alpha/beta OAS file per service and copy it into
//     the generator's oas/ directory,
//  6. run `make generate-go-sdk` inside the generator checkout,
//  7. strip per-service go.mod/go.sum and move the generated packages
//     into <root>/pkg_gen,
//  8. (unless PackagesOnly) scaffold terraform service files,
//  9. (unless SkipCleanup) remove temporary directories.
//
// Returns the first error encountered; some early failures call
// log.Fatal and terminate the process instead of returning.
func (b *Builder) Build() error {
	slog.Info("Starting Builder")
	if b.PackagesOnly {
		slog.Info(" >>> only generating pkg_gen <<<")
	}
	root, err := getRoot()
	if err != nil {
		// NOTE(review): log.Fatal exits the process; returning the error
		// would be more consistent with the rest of this method.
		log.Fatal(err)
	}
	if root == nil || *root == "" {
		return fmt.Errorf("unable to determine root directory from git")
	}
	slog.Info(" ... using root directory", "dir", *root)
	if !b.PackagesOnly {
		// The tfplugingen tools are only needed for the scaffolding phase.
		slog.Info(" ... Checking needed commands available")
		err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"})
		if err != nil {
			return err
		}
	}
	if !b.SkipCleanup {
		slog.Info("Cleaning up old packages directory")
		err = os.RemoveAll(path.Join(*root, "pkg_gen"))
		if err != nil {
			return err
		}
	}
	// NOTE(review): this block repeats the removal above — whenever the
	// condition here holds, !SkipCleanup already removed pkg_gen. Looks
	// redundant; confirm and drop.
	if !b.SkipCleanup && !b.PackagesOnly {
		slog.Info("Cleaning up old packages directory")
		err = os.RemoveAll(path.Join(*root, "pkg_gen"))
		if err != nil {
			return err
		}
	}
	slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
	genDir := path.Join(*root, GEN_REPO_NAME)
	if !b.SkipClone {
		err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
		if err != nil {
			return err
		}
	}
	// NOTE(review): the OAS repo is cloned under genDir (first argument
	// of createRepoDir), so the log line naming *root is misleading.
	slog.Info("Creating oas repo dir", "dir", fmt.Sprintf("%s/%s", *root, OAS_REPO_NAME))
	repoDir, err := createRepoDir(genDir, OAS_REPO, OAS_REPO_NAME, b.SkipClone)
	if err != nil {
		return fmt.Errorf("%s", err.Error())
	}
	slog.Info("Retrieving versions from subdirs")
	// TODO - major
	verMap, err := getVersions(repoDir)
	if err != nil {
		return fmt.Errorf("%s", err.Error())
	}
	slog.Info("Reducing to only latest or highest")
	res, err := getOnlyLatest(verMap)
	if err != nil {
		return fmt.Errorf("%s", err.Error())
	}
	slog.Info("Creating OAS dir")
	err = os.MkdirAll(path.Join(genDir, "oas"), 0755)
	if err != nil {
		return err
	}
	slog.Info("Copying OAS files")
	for service, item := range res {
		// Map key is "<service><maturity>" (e.g. "skealpha"); reconstruct
		// the on-disk layout services/<service>/<vMAJmaturityMIN>/<service>.json.
		baseService := strings.TrimSuffix(service, "alpha")
		baseService = strings.TrimSuffix(baseService, "beta")
		itemVersion := fmt.Sprintf("v%d%s", item.major, item.verString)
		if item.minor != 0 {
			itemVersion = itemVersion + "" + strconv.Itoa(item.minor)
		}
		srcFile := path.Join(
			repoDir,
			"services",
			baseService,
			itemVersion,
			fmt.Sprintf("%s.json", baseService),
		)
		dstFile := path.Join(genDir, "oas", fmt.Sprintf("%s.json", service))
		_, err = copyFile(srcFile, dstFile)
		if err != nil {
			return fmt.Errorf("%s", err.Error())
		}
	}
	slog.Info("Changing dir", "dir", genDir)
	err = os.Chdir(genDir)
	if err != nil {
		return err
	}
	// Delegate the actual SDK generation to the generator repo's Makefile.
	slog.Info("Calling make", "command", "generate-go-sdk")
	cmd := exec.Command("make", "generate-go-sdk")
	var stdOut, stdErr bytes.Buffer
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err = cmd.Start(); err != nil {
		slog.Error("cmd.Start", "error", err)
		return err
	}
	if err = cmd.Wait(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			slog.Error("cmd.Wait", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
			return fmt.Errorf("%s", stdErr.String())
		}
		if err != nil {
			slog.Error("cmd.Wait", "err", err)
			return err
		}
	}
	// The generated per-service modules must not keep their own
	// go.mod/go.sum once vendored into this repository.
	slog.Info("Cleaning up go.mod and go.sum files")
	cleanDir := path.Join(genDir, "sdk-repo-updated", "services")
	dirEntries, err := os.ReadDir(cleanDir)
	if err != nil {
		return err
	}
	for _, entry := range dirEntries {
		if entry.IsDir() {
			err = deleteFiles(
				path.Join(cleanDir, entry.Name(), "go.mod"),
				path.Join(cleanDir, entry.Name(), "go.sum"),
			)
			if err != nil {
				return err
			}
		}
	}
	slog.Info("Changing dir", "dir", *root)
	err = os.Chdir(*root)
	if err != nil {
		return err
	}
	slog.Info("Rearranging package directories")
	err = os.MkdirAll(path.Join(*root, "pkg_gen"), 0755) // noqa:gosec
	if err != nil {
		return err
	}
	srcDir := path.Join(genDir, "sdk-repo-updated", "services")
	items, err := os.ReadDir(srcDir)
	if err != nil {
		return err
	}
	for _, item := range items {
		if item.IsDir() {
			slog.Info(" -> package", "name", item.Name())
			tgtDir := path.Join(*root, "pkg_gen", item.Name())
			// Replace any stale package dir from a previous run.
			if fileExists(tgtDir) {
				delErr := os.RemoveAll(tgtDir)
				if delErr != nil {
					return delErr
				}
			}
			err = os.Rename(path.Join(srcDir, item.Name()), tgtDir)
			if err != nil {
				return err
			}
		}
	}
	if !b.PackagesOnly {
		slog.Info("Generating service boilerplate")
		err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
		if err != nil {
			return err
		}
		slog.Info("Copying all service files")
		err = CopyDirectory(
			path.Join(*root, "generated", "internal", "services"),
			path.Join(*root, "stackit", "internal", "services"),
		)
		if err != nil {
			return err
		}
		err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
		if err != nil {
			return err
		}
	}
	if !b.SkipCleanup {
		slog.Info("Finally removing temporary files and directories")
		err = os.RemoveAll(path.Join(*root, "generated"))
		if err != nil {
			slog.Error("RemoveAll", "dir", path.Join(*root, "generated"), "err", err)
			return err
		}
		err = os.RemoveAll(path.Join(*root, GEN_REPO_NAME))
		if err != nil {
			slog.Error("RemoveAll", "dir", path.Join(*root, GEN_REPO_NAME), "err", err)
			return err
		}
		slog.Info("Cleaning up", "dir", repoDir)
		err = os.RemoveAll(filepath.Dir(repoDir))
		if err != nil {
			return fmt.Errorf("%s", err.Error())
		}
	}
	slog.Info("Done")
	return nil
}
// templateData is the payload passed to the scaffold templates; it
// carries the service (package) and resource names in several casings
// plus the model field names extracted from the generated schema file.
type templateData struct {
	PackageName       string   // service package name as on disk
	PackageNameCamel  string   // service name in camelCase
	PackageNamePascal string   // service name in PascalCase
	NameCamel         string   // resource name in camelCase
	NamePascal        string   // resource name in PascalCase
	NameSnake         string   // resource name in snake_case
	Fields            []string // model field names found in the generated file
}
func fileExists(path string) bool {
_, err := os.Stat(path)
if os.IsNotExist(err) {
return false
}
if err != nil {
panic(err)
}
return true
}
// createBoilerplate walks <folder>/<service>/<resource> and, for every
// resource that has a generated schema file but is missing the matching
// hand-written file, renders the corresponding scaffold template:
//
//   - datasources_gen/<res>_data_source_gen.go present, datasource.go
//     missing  -> render data_source_scaffold.gotmpl,
//   - resources_gen/<res>_resource_gen.go present, resource.go missing
//     -> render resource_scaffold.gotmpl (and functions.go if absent).
//
// Template input is the service/resource name in several casings plus
// the model field names read out of the generated file. Resource names
// must be snake_case; a violation aborts with an error.
func createBoilerplate(rootFolder, folder string) error {
	services, err := os.ReadDir(folder)
	if err != nil {
		return err
	}
	for _, svc := range services {
		if !svc.IsDir() {
			continue
		}
		resources, err := os.ReadDir(path.Join(folder, svc.Name()))
		if err != nil {
			return err
		}
		var handleDS bool
		var handleRes bool
		var foundDS bool
		var foundRes bool
		for _, res := range resources {
			if !res.IsDir() {
				continue
			}
			resourceName := res.Name()
			// A generated *_gen.go file signals that the corresponding
			// hand-written file is expected to exist.
			dsFile := path.Join(folder, svc.Name(), res.Name(), "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", res.Name()))
			handleDS = fileExists(dsFile)
			resFile := path.Join(folder, svc.Name(), res.Name(), "resources_gen", fmt.Sprintf("%s_resource_gen.go", res.Name()))
			handleRes = fileExists(resFile)
			dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
			foundDS = fileExists(dsGoFile)
			resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
			foundRes = fileExists(resGoFile)
			if handleDS && !foundDS {
				slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
				if !ValidateSnakeCase(resourceName) {
					return errors.New("resource name is invalid")
				}
				fields, tokenErr := getTokens(dsFile)
				if tokenErr != nil {
					return fmt.Errorf("error reading tokens: %w", tokenErr)
				}
				tplName := "data_source_scaffold.gotmpl"
				err = writeTemplateToFile(
					tplName,
					path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
					dsGoFile,
					&templateData{
						PackageName:       svc.Name(),
						PackageNameCamel:  ToCamelCase(svc.Name()),
						PackageNamePascal: ToPascalCase(svc.Name()),
						NameCamel:         ToCamelCase(resourceName),
						NamePascal:        ToPascalCase(resourceName),
						NameSnake:         resourceName,
						Fields:            fields,
					},
				)
				if err != nil {
					// NOTE(review): this branch panics while the resource
					// branch below returns the error — inconsistent.
					panic(err)
				}
			}
			if handleRes && !foundRes {
				slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
				if !ValidateSnakeCase(resourceName) {
					return errors.New("resource name is invalid")
				}
				fields, tokenErr := getTokens(resFile)
				if tokenErr != nil {
					return fmt.Errorf("error reading tokens: %w", tokenErr)
				}
				tplName := "resource_scaffold.gotmpl"
				err = writeTemplateToFile(
					tplName,
					path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
					resGoFile,
					&templateData{
						PackageName:       svc.Name(),
						PackageNameCamel:  ToCamelCase(svc.Name()),
						PackageNamePascal: ToPascalCase(svc.Name()),
						NameCamel:         ToCamelCase(resourceName),
						NamePascal:        ToPascalCase(resourceName),
						NameSnake:         resourceName,
						Fields:            fields,
					},
				)
				if err != nil {
					return err
				}
				// functions.go is only scaffolded alongside a new resource.go.
				if !fileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
					slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
					if !ValidateSnakeCase(resourceName) {
						return errors.New("resource name is invalid")
					}
					fncTplName := "functions_scaffold.gotmpl"
					err = writeTemplateToFile(
						fncTplName,
						path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
						path.Join(folder, svc.Name(), res.Name(), "functions.go"),
						&templateData{
							PackageName:       svc.Name(),
							PackageNameCamel:  ToCamelCase(svc.Name()),
							PackageNamePascal: ToPascalCase(svc.Name()),
							NameCamel:         ToCamelCase(resourceName),
							NamePascal:        ToPascalCase(resourceName),
							NameSnake:         resourceName,
						},
					)
					if err != nil {
						return err
					}
				}
			}
		}
	}
	return nil
}
// ucfirst upper-cases the first byte of s; the empty string maps to
// itself. Byte-oriented on purpose, matching the template helper's
// ASCII use.
func ucfirst(s string) string {
	if s == "" {
		return ""
	}
	head, tail := s[:1], s[1:]
	return strings.ToUpper(head) + tail
}
// writeTemplateToFile renders the template tplName, parsed from
// tplFile, into outFile (created/truncated) with data as the template
// context. The "ucfirst" helper is made available to the template.
//
// Fix over the previous version: the output file was only closed on the
// success path, so a failing tmpl.Execute leaked the file handle. The
// file is now closed on every path via defer, and a close error is
// surfaced when no earlier error occurred.
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) (err error) {
	fn := template.FuncMap{
		"ucfirst": ucfirst,
	}
	tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
	if err != nil {
		return err
	}
	var f *os.File
	f, err = os.Create(outFile)
	if err != nil {
		return err
	}
	// Close on all paths; keep the first error encountered.
	defer func() {
		if closeErr := f.Close(); closeErr != nil && err == nil {
			err = closeErr
		}
	}()
	err = tmpl.Execute(f, *data)
	return err
}
// generateServiceFiles scans <rootDir>/service_specs/<service>/<alpha|beta>
// for *_config.yml files and, for each one with a matching OAS file in
// <generatorDir>/oas, runs the tfplugingen toolchain:
//
//  1. tfplugingen-openapi generate -> provider-spec JSON under
//     generated/specs/<svcVersion>_<resource>_spec.json,
//  2. tfplugingen-framework generate resources -> .../<resource>/resources_gen,
//  3. tfplugingen-framework generate data-sources -> .../<resource>/datasources_gen,
//
// then renames the generated datasource "id" attribute via
// handleTfTagForDatasourceFile. Missing OAS files are logged and skipped.
func generateServiceFiles(rootDir, generatorDir string) error {
	// slog.Info("Generating specs folder")
	err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755)
	if err != nil {
		return err
	}
	services, err := os.ReadDir(path.Join(rootDir, "service_specs"))
	if err != nil {
		return err
	}
	for _, service := range services {
		if !service.IsDir() {
			continue
		}
		versions, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name()))
		if err != nil {
			return err
		}
		for _, svcVersion := range versions {
			if !svcVersion.IsDir() {
				continue
			}
			// TODO: use const of supported versions
			if svcVersion.Name() != "alpha" && svcVersion.Name() != "beta" {
				continue
			}
			specFiles, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name()))
			if err != nil {
				return err
			}
			for _, specFile := range specFiles {
				if specFile.IsDir() {
					continue
				}
				// slog.Info("Checking spec", "name", spec.Name())
				// Config files are named <resource>_config.yml.
				r := regexp.MustCompile(`^(.*)_config.yml$`)
				matches := r.FindAllStringSubmatch(specFile.Name(), -1)
				if matches != nil {
					fileName := matches[0][0]
					resource := matches[0][1]
					slog.Info(
						" found service spec",
						"name",
						specFile.Name(),
						"service",
						service.Name(),
						"resource",
						resource,
					)
					oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()))
					if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
						slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
						continue
					}
					// "<service><maturity>" with dashes stripped serves as
					// the generated Go package name.
					scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name())
					scName = strings.ReplaceAll(scName, "-", "")
					err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755)
					if err != nil {
						return err
					}
					// slog.Info("Generating openapi spec json")
					specJsonFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource))
					var stdOut, stdErr bytes.Buffer
					// noqa:gosec
					cmd := exec.Command(
						"tfplugingen-openapi",
						"generate",
						"--config",
						path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
						"--output",
						specJsonFile,
						oasFile,
					)
					cmd.Stdout = &stdOut
					cmd.Stderr = &stdErr
					if err = cmd.Start(); err != nil {
						slog.Error(
							"tfplugingen-openapi generate",
							"error",
							err,
							"stdOut",
							stdOut.String(),
							"stdErr",
							stdErr.String(),
						)
						return err
					}
					if err = cmd.Wait(); err != nil {
						var exitErr *exec.ExitError
						if errors.As(err, &exitErr) {
							slog.Error("tfplugingen-openapi generate", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
							return fmt.Errorf("%s", stdErr.String())
						}
						if err != nil {
							slog.Error("tfplugingen-openapi generate", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
							return err
						}
					}
					if stdOut.Len() > 0 {
						slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
					}
					// slog.Info("Creating terraform svc resource files folder")
					tgtFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen")
					err = os.MkdirAll(tgtFolder, 0755)
					if err != nil {
						return err
					}
					// slog.Info("Generating terraform svc resource files")
					// noqa:gosec
					cmd2 := exec.Command(
						"tfplugingen-framework",
						"generate",
						"resources",
						"--input",
						specJsonFile,
						"--output",
						tgtFolder,
						"--package",
						scName,
					)
					// NOTE(review): stdOut/stdErr still hold output from the
					// previous command; cmd2's output is appended to it.
					cmd2.Stdout = &stdOut
					cmd2.Stderr = &stdErr
					if err = cmd2.Start(); err != nil {
						slog.Error("tfplugingen-framework generate resources", "error", err)
						return err
					}
					if err = cmd2.Wait(); err != nil {
						var exitErr *exec.ExitError
						if errors.As(err, &exitErr) {
							slog.Error("tfplugingen-framework generate resources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
							return fmt.Errorf("%s", stdErr.String())
						}
						if err != nil {
							slog.Error("tfplugingen-framework generate resources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
							return err
						}
					}
					// slog.Info("Creating terraform svc datasource files folder")
					tgtFolder = path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen")
					err = os.MkdirAll(tgtFolder, 0755)
					if err != nil {
						return err
					}
					// slog.Info("Generating terraform svc resource files")
					// noqa:gosec
					cmd3 := exec.Command(
						"tfplugingen-framework",
						"generate",
						"data-sources",
						"--input",
						specJsonFile,
						"--output",
						tgtFolder,
						"--package",
						scName,
					)
					var stdOut3, stdErr3 bytes.Buffer
					cmd3.Stdout = &stdOut3
					cmd3.Stderr = &stdErr3
					if err = cmd3.Start(); err != nil {
						slog.Error("tfplugingen-framework generate data-sources", "error", err)
						return err
					}
					if err = cmd3.Wait(); err != nil {
						var exitErr *exec.ExitError
						if errors.As(err, &exitErr) {
							// NOTE(review): this logs stdOut/stdErr from the
							// earlier commands, not cmd3's stdOut3/stdErr3.
							slog.Error("tfplugingen-framework generate data-sources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
							return fmt.Errorf("%s", stdErr.String())
						}
						if err != nil {
							slog.Error("tfplugingen-framework generate data-sources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
							return err
						}
					}
					tfAnoErr := handleTfTagForDatasourceFile(
						path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
						scName,
						resource,
					)
					if tfAnoErr != nil {
						return tfAnoErr
					}
				}
			}
		}
	}
	return nil
}
// handleTfTagForDatasourceFile rewrites the generated datasource file
// at filePath line by line, replacing the "id" schema attribute and the
// tfsdk:"id" model tag with "tf_original_api_id" (see handleLine). The
// rewrite is written to a temp file in the repo root which is then
// renamed over the original. A missing file is logged and skipped
// without error.
func handleTfTagForDatasourceFile(filePath, service, resource string) error {
	slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
	if !fileExists(filePath) {
		slog.Warn(" could not find file, skipping", "path", filePath)
		return nil
	}
	f, err := os.Open(filePath)
	if err != nil {
		return err
	}
	// NOTE(review): f is also closed explicitly below, so this deferred
	// close fails silently on the already-closed handle.
	defer f.Close()
	root, err := getRoot()
	if err != nil {
		// NOTE(review): log.Fatal terminates the whole process from a
		// helper; returning the error would be friendlier to callers.
		log.Fatal(err)
	}
	tmp, err := os.CreateTemp(*root, "replace-*")
	if err != nil {
		return err
	}
	defer tmp.Close()
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		resLine, err := handleLine(sc.Text())
		if err != nil {
			return err
		}
		if _, err := io.WriteString(tmp, resLine+"\n"); err != nil {
			return err
		}
	}
	if scErr := sc.Err(); scErr != nil {
		return scErr
	}
	// Close both files before the rename so all writes are flushed.
	if err := tmp.Close(); err != nil {
		return err
	}
	if err := f.Close(); err != nil {
		return err
	}
	if err := os.Rename(tmp.Name(), filePath); err != nil {
		log.Fatal(err)
	}
	return nil
}
// handleLine rewrites a single generated-source line: when the line is
// either the schema attribute declaration for "id" or the model struct
// field tagged `tfsdk:"id"`, the id token is renamed to
// "tf_original_api_id"; every other line passes through untouched. The
// error result is always nil and exists for call-site symmetry.
func handleLine(line string) (string, error) {
	patterns := []*regexp.Regexp{
		// `"id": schema.XxxAttribute{` in the schema map
		regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`),
		// `Id types.Xxx `+"`"+`tfsdk:"id"`+"`"+` in the model struct
		regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`),
	}
	for _, re := range patterns {
		if m := re.FindAllStringSubmatch(line, -1); m != nil {
			return fmt.Sprintf("%stf_original_api_id%s", m[0][1], m[0][3]), nil
		}
	}
	return line, nil
}
// checkCommands verifies that every command in commands resolves on
// PATH, logging each hit; the first missing command aborts the scan
// with a descriptive error.
func checkCommands(commands []string) error {
	for _, name := range commands {
		if !commandExists(name) {
			return fmt.Errorf("missing command %s", name)
		}
		slog.Info(" found", "command", name)
	}
	return nil
}

// commandExists reports whether cmd resolves to an executable on PATH.
func commandExists(cmd string) bool {
	_, lookErr := exec.LookPath(cmd)
	return lookErr == nil
}
func deleteFiles(fNames ...string) error {
for _, fName := range fNames {
if _, err := os.Stat(fName); !os.IsNotExist(err) {
err = os.Remove(fName)
if err != nil {
return err
}
}
}
return nil
}
func copyFile(src, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer source.Close()
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer destination.Close()
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
// getOnlyLatest returns a copy of m.
//
// The previous implementation compared major/minor numbers against an
// entry already stored under the same key — but a Go map iteration
// yields every key exactly once, so that branch was dead code and the
// function always produced an identical copy. Deduplication of
// competing version directories must happen where the map is built
// (see extractVersions); the dead comparison has been removed and the
// copy semantics kept so callers are unaffected. The error result is
// always nil and retained for interface stability.
func getOnlyLatest(m map[string]version) (map[string]version, error) {
	latest := make(map[string]version, len(m))
	for svc, ver := range m {
		latest[svc] = ver
	}
	return latest, nil
}
// getVersions scans <dir>/services/<service>/<versionDir> and returns a
// map keyed by service plus maturity (e.g. "skealpha") of the version
// parsed from each service's subdirectory names.
func getVersions(dir string) (map[string]version, error) {
	res := make(map[string]version)
	children, err := os.ReadDir(path.Join(dir, "services"))
	if err != nil {
		return nil, err
	}
	for _, svcEntry := range children {
		if !svcEntry.IsDir() {
			continue
		}
		versionDirs, readErr := os.ReadDir(path.Join(dir, "services", svcEntry.Name()))
		if readErr != nil {
			return nil, readErr
		}
		extracted, exErr := extractVersions(svcEntry.Name(), versionDirs)
		if exErr != nil {
			// Preserve original behavior: return the partial result of
			// the failing extraction, not the accumulated map.
			return extracted, exErr
		}
		for key, ver := range extracted {
			res[key] = ver
		}
	}
	return res, nil
}
// extractVersions parses the version subdirectories of one service and
// returns a map keyed by "<service><maturity>" (e.g. "skealpha").
//
// Fix over the previous version: when several directories mapped to the
// same key (v1alpha1, v1alpha2, v1alpha10, ...), whichever directory
// ReadDir happened to yield last silently won. ReadDir order is
// lexical, so "v1alpha2" overwrote "v1alpha10". Entries now keep the
// highest (major, minor) pair per key.
func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
	res := make(map[string]version)
	verRegex := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`)
	for _, vDir := range versionDirs {
		if !vDir.IsDir() {
			continue
		}
		matches := verRegex.FindAllStringSubmatch(vDir.Name(), -1)
		if matches == nil {
			continue
		}
		svc, ver, err := handleVersion(service, matches[0])
		if err != nil {
			return nil, err
		}
		if svc == nil || ver == nil {
			continue
		}
		// Keep only the newest version per key instead of blindly
		// overwriting with the last directory read.
		if existing, ok := res[*svc]; ok {
			if existing.major > ver.major ||
				(existing.major == ver.major && existing.minor >= ver.minor) {
				continue
			}
		}
		res[*svc] = *ver
	}
	return res, nil
}

// handleVersion converts one regex match (full, major, maturity, minor)
// for a directory name like "v1alpha2" into the map key
// "<service><maturity>" and a parsed version. Only "alpha" and "beta"
// maturities are supported; a missing minor number defaults to 0. A nil
// match yields nil results without an error. (The stray debug
// fmt.Println from the previous version has been removed.)
func handleVersion(service string, match []string) (*string, *version, error) {
	if match == nil {
		return nil, nil, nil
	}
	verString := match[2]
	if verString != "alpha" && verString != "beta" {
		return nil, nil, errors.New("unsupported version")
	}
	majVer, err := strconv.Atoi(match[1])
	if err != nil {
		return nil, nil, err
	}
	minorStr := match[3]
	if minorStr == "" {
		minorStr = "0"
	}
	minVer, err := strconv.Atoi(minorStr)
	if err != nil {
		return nil, nil, err
	}
	key := fmt.Sprintf("%s%s", service, verString)
	return &key, &version{verString: verString, major: majVer, minor: minVer}, nil
}
// createRepoDir ensures <root>/<repoName> holds a clone of repoUrl and
// returns that directory path. With skipClone set no git interaction
// happens; an already existing target directory is reused with a
// warning instead of being re-cloned.
func createRepoDir(root, repoUrl, repoName string, skipClone bool) (string, error) {
	targetDir := path.Join(root, repoName)
	if skipClone {
		return targetDir, nil
	}
	if fileExists(targetDir) {
		slog.Warn("target dir exists - skipping", "targetDir", targetDir)
		return targetDir, nil
	}
	if _, cloneErr := git.Clone(
		clone.Repository(repoUrl),
		clone.Directory(targetDir),
	); cloneErr != nil {
		return "", cloneErr
	}
	return targetDir, nil
}
// createGeneratorDir clones repoUrl into targetDir, first deleting any
// existing directory so the clone starts fresh. With skipClone set the
// call is a no-op.
func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
	if skipClone {
		return nil
	}
	if fileExists(targetDir) {
		if remErr := os.RemoveAll(targetDir); remErr != nil {
			return remErr
		}
	}
	_, cloneErr := git.Clone(
		clone.Repository(repoUrl),
		clone.Directory(targetDir),
	)
	return cloneErr
}
// getRoot asks git for the repository top-level directory and returns a
// pointer to the first line of its output. The error is non-nil when
// git is unavailable or the working directory is outside a repository.
func getRoot() (*string, error) {
	out, err := exec.Command("git", "rev-parse", "--show-toplevel").Output()
	if err != nil {
		return nil, err
	}
	// Keep everything before the first newline (git appends one).
	topLevel, _, _ := strings.Cut(string(out), "\n")
	return &topLevel, nil
}
func getTokens(fileName string) ([]string, error) {
fset := token.NewFileSet()
var result []string
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
if err != nil {
return nil, err
}
ast.Inspect(node, func(n ast.Node) bool {
// Suche nach Typ-Deklarationen (structs)
ts, ok := n.(*ast.TypeSpec)
if ok {
if strings.Contains(ts.Name.Name, "Model") {
// fmt.Printf("found model: %s\n", ts.Name.Name)
ast.Inspect(ts, func(sn ast.Node) bool {
tts, tok := sn.(*ast.Field)
if tok {
// fmt.Printf(" found: %+v\n", tts.Names[0])
// spew.Dump(tts.Type)
result = append(result, tts.Names[0].String())
//fld, fldOk := tts.Type.(*ast.Ident)
//if fldOk {
// fmt.Printf("type: %+v\n", fld)
//}
}
return true
})
}
}
return true
})
return result, nil
}

View file

@ -3,7 +3,6 @@ package build
import ( import (
"fmt" "fmt"
"io" "io"
"log/slog"
"os" "os"
"path/filepath" "path/filepath"
"syscall" "syscall"
@ -75,24 +74,14 @@ func Copy(srcFile, dstFile string) error {
return err return err
} }
defer func(out *os.File) { defer out.Close()
err := out.Close()
if err != nil {
slog.Error("failed to close file", slog.Any("err", err))
}
}(out)
in, err := os.Open(srcFile) in, err := os.Open(srcFile)
if err != nil { if err != nil {
return err return err
} }
defer func(in *os.File) { defer in.Close()
err := in.Close()
if err != nil {
slog.Error("error closing destination file", slog.Any("err", err))
}
}(in)
_, err = io.Copy(out, in) _, err = io.Copy(out, in)
if err != nil { if err != nil {

View file

@ -39,14 +39,11 @@ type {{.NameCamel}}Resource struct{
providerData core.ProviderData providerData core.ProviderData
} }
// resourceModel represents the Terraform resource state
type resourceModel = {{.PackageName}}.{{.NamePascal}}Model
type {{.NamePascal}}ResourceIdentityModel struct { type {{.NamePascal}}ResourceIdentityModel struct {
ProjectID types.String `tfsdk:"project_id"` ProjectID types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"` Region types.String `tfsdk:"region"`
// TODO: implement further needed parts // TODO: implement further needed parts
{{.NamePascal}}ID types.String `tfsdk:"{{.NameSnake}}_id"` {{.NamePascal}}ID types.String `tfsdk:"instance_id"`
} }
// Metadata defines terraform resource name // Metadata defines terraform resource name
@ -54,9 +51,6 @@ func (r *{{.NameCamel}}Resource) Metadata(ctx context.Context, req resource.Meta
resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}" resp.TypeName = req.ProviderTypeName + "_{{.PackageName}}_{{.NameSnake}}"
} }
//go:embed planModifiers.yaml
var modifiersFileByte []byte
// Schema loads the schema from generated files and adds plan modifiers // Schema loads the schema from generated files and adds plan modifiers
func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { func (r *{{.NameCamel}}Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx) schema = {{.PackageName}}ResGen.{{.NamePascal}}ResourceSchema(ctx)
@ -88,7 +82,6 @@ func (r *instanceResource) IdentitySchema(_ context.Context, _ resource.Identity
"instance_id": identityschema.StringAttribute{ "instance_id": identityschema.StringAttribute{
RequiredForImport: true, // can be defaulted by the provider configuration RequiredForImport: true, // can be defaulted by the provider configuration
}, },
// TODO: implement remaining schema parts
}, },
} }
} }
@ -167,6 +160,9 @@ func (r *{{.NameCamel}}Resource) ModifyPlan(
} }
} }
//go:embed planModifiers.yaml
var modifiersFileByte []byte
// Create creates a new resource // Create creates a new resource
func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
var data {{.PackageName}}ResGen.{{.NamePascal}}Model var data {{.PackageName}}ResGen.{{.NamePascal}}Model
@ -184,7 +180,6 @@ func (r *{{.NameCamel}}Resource) Create(ctx context.Context, req resource.Create
region := data.Region.ValueString() region := data.Region.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
// TODO: add remaining fields
// TODO: Create API call logic // TODO: Create API call logic
/* /*

View file

@ -2,29 +2,24 @@ package cmd
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/build"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/build"
) )
var ( var (
skipCleanup bool skipCleanup bool
skipClone bool skipClone bool
packagesOnly bool packagesOnly bool
verbose bool
debug bool
) )
var buildCmd = &cobra.Command{ var buildCmd = &cobra.Command{
Use: "build", Use: "build",
Short: "Build the necessary boilerplate", Short: "Build the necessary boilerplate",
Long: `...`, Long: `...`,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, args []string) error {
b := build.Builder{ b := build.Builder{
SkipClone: skipClone, SkipClone: skipClone,
SkipCleanup: skipCleanup, SkipCleanup: skipCleanup,
PackagesOnly: packagesOnly, PackagesOnly: packagesOnly,
Verbose: verbose,
Debug: debug,
} }
return b.Build() return b.Build()
}, },
@ -34,10 +29,8 @@ func NewBuildCmd() *cobra.Command {
return buildCmd return buildCmd
} }
func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands func init() { // nolint: gochecknoinits
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps") buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
buildCmd.Flags().BoolVarP(&debug, "debug", "d", false, "Enable debug output")
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git") buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages") buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
buildCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "verbose - show more logs")
} }

View file

@ -24,7 +24,7 @@ var getFieldsCmd = &cobra.Command{
Use: "get-fields", Use: "get-fields",
Short: "get fields from file", Short: "get fields from file",
Long: `...`, Long: `...`,
PreRunE: func(_ *cobra.Command, _ []string) error { PreRunE: func(cmd *cobra.Command, args []string) error {
typeStr := "data_source" typeStr := "data_source"
if resType != "resource" && resType != "datasource" { if resType != "resource" && resType != "datasource" {
return fmt.Errorf("--type can only be resource or datasource") return fmt.Errorf("--type can only be resource or datasource")
@ -75,14 +75,14 @@ var getFieldsCmd = &cobra.Command{
filePath = p filePath = p
//// Enum check //// Enum check
// switch format { //switch format {
// case "json", "yaml": //case "json", "yaml":
//default: //default:
// return fmt.Errorf("invalid --format: %s (want json|yaml)", format) // return fmt.Errorf("invalid --format: %s (want json|yaml)", format)
//} //}
return nil return nil
}, },
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return getFields(filePath) return getFields(filePath)
}, },
} }
@ -107,26 +107,31 @@ func getTokens(fileName string) ([]string, error) {
return nil, err return nil, err
} }
ast.Inspect( ast.Inspect(node, func(n ast.Node) bool {
node, func(n ast.Node) bool {
// Suche nach Typ-Deklarationen (structs) // Suche nach Typ-Deklarationen (structs)
ts, ok := n.(*ast.TypeSpec) ts, ok := n.(*ast.TypeSpec)
if ok { if ok {
if strings.Contains(ts.Name.Name, "Model") { if strings.Contains(ts.Name.Name, "Model") {
ast.Inspect( // fmt.Printf("found model: %s\n", ts.Name.Name)
ts, func(sn ast.Node) bool { ast.Inspect(ts, func(sn ast.Node) bool {
tts, tok := sn.(*ast.Field) tts, tok := sn.(*ast.Field)
if tok { if tok {
// fmt.Printf(" found: %+v\n", tts.Names[0])
// spew.Dump(tts.Type)
result = append(result, tts.Names[0].String()) result = append(result, tts.Names[0].String())
//fld, fldOk := tts.Type.(*ast.Ident)
//if fldOk {
// fmt.Printf("type: %+v\n", fld)
//}
} }
return true return true
}, })
)
} }
} }
return true return true
}, })
)
return result, nil return result, nil
} }
@ -134,15 +139,9 @@ func NewGetFieldsCmd() *cobra.Command {
return getFieldsCmd return getFieldsCmd
} }
func init() { //nolint:gochecknoinits //this is the only way to add the command to the rootCmd func init() { // nolint: gochecknoinits
getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path") getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path")
getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name") getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name")
getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name") getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name")
getFieldsCmd.Flags().StringVarP( getFieldsCmd.Flags().StringVarP(&resType, "type", "t", "resource", "resource type (data-source or resource [default])")
&resType,
"type",
"t",
"resource",
"resource type (data-source or resource [default])",
)
} }

View file

@ -35,27 +35,36 @@ type GpgPublicKey struct {
} }
func (p *Provider) CreateArchitectureFiles() error { func (p *Provider) CreateArchitectureFiles() error {
// var namespace, provider, distPath, repoName, version, gpgFingerprint, gpgPubKeyFile, domain string
log.Println("* Creating architecture files in target directories") log.Println("* Creating architecture files in target directories")
// filename = terraform-provider-[provider]_0.0.1_darwin_amd64.zip - provider_name + version + target + architecture + .zip
// prefix := fmt.Sprintf("v1/providers/%s/%s/%s/", namespace, provider, version)
prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version) prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version)
// pathPrefix := fmt.Sprintf("release/%s", prefix)
pathPrefix := path.Join("release", prefix) pathPrefix := path.Join("release", prefix)
// urlPrefix := fmt.Sprintf("https://%s/%s", domain, prefix)
urlPrefix, err := url.JoinPath("https://", p.Domain, prefix) urlPrefix, err := url.JoinPath("https://", p.Domain, prefix)
if err != nil { if err != nil {
return fmt.Errorf("error creating base url: %w", err) return fmt.Errorf("error creating base url: %w", err)
} }
// download url = https://example.com/v1/providers/namespace/provider/0.0.1/download/terraform-provider_0.0.1_darwin_amd64.zip
downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download") downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download")
if err != nil { if err != nil {
return fmt.Errorf("error crearting download url: %w", err) return fmt.Errorf("error crearting download url: %w", err)
} }
downloadPathPrefix := path.Join(pathPrefix, "download") downloadPathPrefix := path.Join(pathPrefix, "download")
// shasums url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS
shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version)) shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version))
if err != nil { if err != nil {
return fmt.Errorf("error creating shasums url: %w", err) return fmt.Errorf("error creating shasums url: %w", err)
} }
// shasums_signature_url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS.sig
shasumsSigUrl := shasumsUrl + ".sig" shasumsSigUrl := shasumsUrl + ".sig"
gpgAsciiPub, err := p.ReadGpgFile() gpgAsciiPub, err := p.ReadGpgFile()
@ -107,6 +116,33 @@ func (p *Provider) CreateArchitectureFiles() error {
}, },
}, },
} }
// var architectureTemplate = []byte(fmt.Sprintf(`
//{
// "protocols": [
// "4.0",
// "5.1",
// "6.0"
// ],
// "os": "%s",
// "arch": "%s",
// "filename": "%s",
// "download_url": "%s",
// "shasums_url": "%s",
// "shasums_signature_url": "%s",
// "shasum": "%s",
// "signing_keys": {
// "gpg_public_keys": [
// {
// "key_id": "%s",
// "ascii_armor": "%s",
// "trust_signature": "",
// "source": "",
// "source_url": ""
// }
// ]
// }
//}
//`, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub))
log.Printf(" - Arch file: %s", archFileName) log.Printf(" - Arch file: %s", archFileName)
@ -124,12 +160,8 @@ func WriteArchitectureFile(filePath string, arch Architecture) error {
if err != nil { if err != nil {
return fmt.Errorf("error encoding data: %w", err) return fmt.Errorf("error encoding data: %w", err)
} }
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm
err = os.WriteFile( err = os.WriteFile(filePath, jsonString, os.ModePerm)
filePath,
jsonString,
os.ModePerm,
)
if err != nil { if err != nil {
return fmt.Errorf("error writing data: %w", err) return fmt.Errorf("error writing data: %w", err)
} }

View file

@ -161,12 +161,10 @@ func (p *Provider) createVersionsFile() error {
target := fileNameSplit[2] target := fileNameSplit[2]
arch := fileNameSplit[3] arch := fileNameSplit[3]
version.Platforms = append( version.Platforms = append(version.Platforms, Platform{
version.Platforms, Platform{
OS: target, OS: target,
Arch: arch, Arch: arch,
}, })
)
} }
data := Data{} data := Data{}
@ -208,19 +206,16 @@ func (p *Provider) CreateWellKnown() error {
log.Println("* Creating .well-known directory") log.Println("* Creating .well-known directory")
pathString := path.Join(p.RootPath, "release", ".well-known") pathString := path.Join(p.RootPath, "release", ".well-known")
//nolint:gosec // this file is not sensitive, so we can use ModePerm
err := os.MkdirAll(pathString, os.ModePerm) err := os.MkdirAll(pathString, os.ModePerm)
if err != nil && !errors.Is(err, fs.ErrExist) { if err != nil && !errors.Is(err, fs.ErrExist) {
return fmt.Errorf("error creating '%s' dir: %w", pathString, err) return fmt.Errorf("error creating '%s' dir: %w", pathString, err)
} }
log.Println(" - Writing to .well-known/terraform.json file") log.Println(" - Writing to .well-known/terraform.json file")
//nolint:gosec // this file is not sensitive, so we can use 0644
err = os.WriteFile( err = os.WriteFile(
fmt.Sprintf("%s/terraform.json", pathString), fmt.Sprintf("%s/terraform.json", pathString),
[]byte(`{"providers.v1": "/v1/providers/"}`), []byte(`{"providers.v1": "/v1/providers/"}`),
0o644, 0644,
) )
if err != nil { if err != nil {
return err return err
@ -229,10 +224,9 @@ func (p *Provider) CreateWellKnown() error {
return nil return nil
} }
func CreateDir(pathValue string) error { func CreateDir(path string) error {
log.Printf("* Creating %s directory", pathValue) log.Printf("* Creating %s directory", path)
//nolint:gosec // this file is not sensitive, so we can use ModePerm err := os.MkdirAll(path, os.ModePerm)
err := os.MkdirAll(pathValue, os.ModePerm)
if errors.Is(err, fs.ErrExist) { if errors.Is(err, fs.ErrExist) {
return nil return nil
} }
@ -275,23 +269,13 @@ func CopyFile(src, dst string) (int64, error) {
if err != nil { if err != nil {
return 0, err return 0, err
} }
defer func(source *os.File) { defer source.Close()
err := source.Close()
if err != nil {
slog.Error("error closing source file", slog.Any("err", err))
}
}(source)
destination, err := os.Create(dst) destination, err := os.Create(dst)
if err != nil { if err != nil {
return 0, err return 0, err
} }
defer func(destination *os.File) { defer destination.Close()
err := destination.Close()
if err != nil {
slog.Error("error closing destination file", slog.Any("err", err))
}
}(destination)
nBytes, err := io.Copy(destination, source) nBytes, err := io.Copy(destination, source)
return nBytes, err return nBytes, err
} }

View file

@ -35,12 +35,7 @@ func (d *Data) WriteToFile(filePath string) error {
return fmt.Errorf("error encoding data: %w", err) return fmt.Errorf("error encoding data: %w", err)
} }
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm err = os.WriteFile(filePath, jsonString, os.ModePerm)
err = os.WriteFile(
filePath,
jsonString,
os.ModePerm,
)
if err != nil { if err != nil {
return fmt.Errorf("error writing data: %w", err) return fmt.Errorf("error writing data: %w", err)
} }
@ -91,13 +86,7 @@ func (d *Data) LoadFromUrl(uri string) error {
if err != nil { if err != nil {
return err return err
} }
defer func(name string) { defer os.Remove(file.Name()) // Clean up
//nolint:gosec // The file path is generated by os.CreateTemp and is not user-controllable
err := os.Remove(name)
if err != nil {
slog.Error("failed to remove temporary file", slog.Any("err", err))
}
}(file.Name()) // Clean up
err = DownloadFile( err = DownloadFile(
u.String(), u.String(),
@ -134,30 +123,20 @@ func (v *Version) AddProtocol(p string) error {
// DownloadFile will download a url and store it in local filepath. // DownloadFile will download a url and store it in local filepath.
// It writes to the destination file as it downloads it, without // It writes to the destination file as it downloads it, without
// loading the entire file into memory. // loading the entire file into memory.
func DownloadFile(urlValue, filepath string) error { func DownloadFile(url string, filepath string) error {
// Create the file // Create the file
//nolint:gosec // path traversal is not a concern here, as the filepath is generated by us and not user input
out, err := os.Create(filepath) out, err := os.Create(filepath)
if err != nil { if err != nil {
return err return err
} }
defer func(out *os.File) { defer out.Close()
err := out.Close()
if err != nil {
slog.Error("failed to close file", slog.Any("err", err))
}
}(out)
// Get the data // Get the data
resp, err := http.Get(url)
//nolint:gosec,bodyclose // this is a controlled URL, not user input
resp, err := http.Get(urlValue)
if err != nil { if err != nil {
return err return err
} }
defer func(Body io.ReadCloser) { defer resp.Body.Close()
_ = Body.Close()
}(resp.Body)
// Write the body to file // Write the body to file
_, err = io.Copy(out, resp.Body) _, err = io.Copy(out, resp.Body)

View file

@ -10,8 +10,7 @@ import (
"path/filepath" "path/filepath"
"github.com/spf13/cobra" "github.com/spf13/cobra"
publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/publish"
publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/publish"
) )
var ( var (
@ -29,32 +28,20 @@ var publishCmd = &cobra.Command{
Use: "publish", Use: "publish",
Short: "Publish terraform provider", Short: "Publish terraform provider",
Long: `...`, Long: `...`,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(_ *cobra.Command, args []string) error {
return publish() return publish()
}, },
} }
func init() { //nolint:gochecknoinits //this is the standard way to set up cobra commands func init() { // nolint: gochecknoinits
publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.") publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.")
publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.") publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.")
publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.") publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.")
publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.") publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.")
publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.") publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.")
publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.") publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.")
publishCmd.Flags().StringVarP( publishCmd.Flags().StringVarP(&gpgFingerprint, "gpgFingerprint", "f", "", "GPG Fingerprint for the Terraform registry.")
&gpgFingerprint, publishCmd.Flags().StringVarP(&gpgPubKeyFile, "gpgPubKeyFile", "k", "", "GPG PubKey file name for the Terraform registry.")
"gpgFingerprint",
"f",
"",
"GPG Fingerprint for the Terraform registry.",
)
publishCmd.Flags().StringVarP(
&gpgPubKeyFile,
"gpgPubKeyFile",
"k",
"",
"GPG PubKey file name for the Terraform registry.",
)
err := publishCmd.MarkFlagRequired("namespace") err := publishCmd.MarkFlagRequired("namespace")
if err != nil { if err != nil {
@ -117,7 +104,6 @@ func publish() error {
// Create release dir - only the contents of this need to be uploaded to S3 // Create release dir - only the contents of this need to be uploaded to S3
log.Printf("* Creating release directory") log.Printf("* Creating release directory")
//nolint:gosec // this directory is not sensitive, so we can use 0750
err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm) err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm)
if err != nil && !errors.Is(err, fs.ErrExist) { if err != nil && !errors.Is(err, fs.ErrExist) {
return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err) return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err)

View file

@ -5,10 +5,9 @@ import (
"log/slog" "log/slog"
"os" "os"
"github.com/SladkyCitron/slogcolor" "github.com/MatusOllah/slogcolor"
cc "github.com/ivanpirog/coloredcobra" cc "github.com/ivanpirog/coloredcobra"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd"
) )
func main() { func main() {
@ -30,7 +29,6 @@ func main() {
cmd.NewBuildCmd(), cmd.NewBuildCmd(),
cmd.NewPublishCmd(), cmd.NewPublishCmd(),
cmd.NewGetFieldsCmd(), cmd.NewGetFieldsCmd(),
cmd.NewExamplesCmd(),
) )
err := rootCmd.Execute() err := rootCmd.Execute()

View file

@ -31,8 +31,8 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
### Read-Only ### Read-Only
- `acl` (List of String) List of IPV4 cidr. - `acl` (List of String) List of IPV4 cidr.
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule. - `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info)) - `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption. - `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption)) ⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
@ -52,18 +52,10 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
Read-Only: Read-Only:
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
<a id="nestedatt--connection_info--write"></a>
### Nested Schema for `connection_info.write`
Read-Only:
- `host` (String) The host of the instance. - `host` (String) The host of the instance.
- `port` (Number) The port of the instance. - `port` (Number) The port of the instance.
<a id="nestedatt--encryption"></a> <a id="nestedatt--encryption"></a>
### Nested Schema for `encryption` ### Nested Schema for `encryption`

View file

@ -26,7 +26,7 @@ description: |-
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint. - `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
- `compatibility_level` (Number) CompatibilityLevel of the Database. - `compatibility_level` (Number) CompatibilityLevel of the Database.
- `id` (String) The terraform internal identifier. - `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
- `name` (String) The name of the database. - `name` (String) The name of the database.
- `owner` (String) The owner of the database. - `owner` (String) The owner of the database.
- `tf_original_api_id` (Number) The id of the database. - `tf_original_api_id` (Number) The id of the database.

View file

@ -0,0 +1,54 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
subcategory: ""
description: |-
---
# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
## Example Usage
```terraform
data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
project_id = var.project_id
region = var.region
cpu = 4
ram = 16
node_type = "Single"
storage_class = "premium-perf2-stackit"
}
```
<!-- schema generated by tfplugindocs -->
## Schema
### Required
- `cpu` (Number) The cpu count of the instance.
- `node_type` (String) defines the nodeType it can be either single or replica
- `project_id` (String) The cpu count of the instance.
- `ram` (Number) The memory of the instance in Gibibyte.
- `region` (String) The flavor description.
- `storage_class` (String) The memory of the instance in Gibibyte.
### Read-Only
- `description` (String) The flavor description.
- `flavor_id` (String) The flavor id of the instance flavor.
- `id` (String) The terraform id of the instance flavor.
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
<a id="nestedatt--storage_classes"></a>
### Nested Schema for `storage_classes`
Read-Only:
- `class` (String)
- `max_io_per_sec` (Number)
- `max_through_in_mb` (Number)

View file

@ -34,6 +34,7 @@ data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
- `edition` (String) Edition of the MSSQL server instance - `edition` (String) Edition of the MSSQL server instance
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption)) - `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
- `flavor_id` (String) The id of the instance flavor. - `flavor_id` (String) The id of the instance flavor.
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`\".
- `is_deletable` (Boolean) Whether the instance can be deleted or not. - `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `name` (String) The name of the instance. - `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))

View file

@ -3,12 +3,12 @@
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview" page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
subcategory: "" subcategory: ""
description: |- description: |-
SQLServer Flex user data source schema. Must have a region specified in the provider configuration.
--- ---
# stackitprivatepreview_sqlserverflexalpha_user (Data Source) # stackitprivatepreview_sqlserverflexalpha_user (Data Source)
SQLServer Flex user data source schema. Must have a `region` specified in the provider configuration.
## Example Usage ## Example Usage
@ -25,38 +25,20 @@ data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
### Required ### Required
- `instance_id` (String) The ID of the instance. - `instance_id` (String) ID of the SQLServer Flex instance.
- `project_id` (String) The STACKIT project ID. - `project_id` (String) STACKIT project ID to which the instance is associated.
- `region` (String) The region which should be addressed - `user_id` (Number) User ID.
### Optional ### Optional
- `page` (Number) Number of the page of items list to be returned. - `region` (String) The resource region. If not defined, the provider region is used.
- `size` (Number) Number of items to be returned on each page.
- `sort` (String) Sorting of the users to be returned on each page.
### Read-Only ### Read-Only
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination)) - `default_database` (String)
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users)) - `host` (String)
- `id` (String) Terraform's internal data source. ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
<a id="nestedatt--pagination"></a> - `port` (Number)
### Nested Schema for `pagination` - `roles` (Set of String) Database access levels for the user.
- `status` (String)
Read-Only: - `username` (String) Username of the SQLServer Flex instance.
- `page` (Number)
- `size` (Number)
- `sort` (String)
- `total_pages` (Number)
- `total_rows` (Number)
<a id="nestedatt--users"></a>
### Nested Schema for `users`
Read-Only:
- `status` (String) The current status of the user.
- `tf_original_api_id` (Number) The ID of the user.
- `username` (String) The name of the user.

View file

@ -0,0 +1,54 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
subcategory: ""
description: |-
---
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
## Example Usage
```terraform
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
project_id = var.project_id
region = var.region
cpu = 4
ram = 16
node_type = "Single"
storage_class = "premium-perf2-stackit"
}
```
<!-- schema generated by tfplugindocs -->
## Schema
### Required
- `cpu` (Number) The cpu count of the instance.
- `node_type` (String) defines the nodeType it can be either single or HA
- `project_id` (String) The project ID of the flavor.
- `ram` (Number) The memory of the instance in Gibibyte.
- `region` (String) The region of the flavor.
- `storage_class` (String) The memory of the instance in Gibibyte.
### Read-Only
- `description` (String) The flavor description.
- `flavor_id` (String) The id of the instance flavor.
- `id` (String) The id of the instance flavor.
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes))
<a id="nestedatt--storage_classes"></a>
### Nested Schema for `storage_classes`
Read-Only:
- `class` (String)
- `max_io_per_sec` (Number)
- `max_through_in_mb` (Number)

View file

@ -1,54 +0,0 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
subcategory: ""
description: |-
---
# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
<!-- schema generated by tfplugindocs -->
## Schema
### Required
- `instance_id` (String) The ID of the instance.
- `project_id` (String) The STACKIT project ID.
- `region` (String) The region which should be addressed
### Optional
- `page` (Number) Number of the page of items list to be returned.
- `size` (Number) Number of items to be returned on each page.
- `sort` (String) Sorting of the users to be returned on each page.
### Read-Only
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
<a id="nestedatt--pagination"></a>
### Nested Schema for `pagination`
Read-Only:
- `page` (Number)
- `size` (Number)
- `sort` (String)
- `total_pages` (Number)
- `total_rows` (Number)
<a id="nestedatt--users"></a>
### Nested Schema for `users`
Read-Only:
- `status` (String) The current status of the user.
- `tf_original_api_id` (Number) The ID of the user.
- `username` (String) The name of the user.

View file

@ -25,16 +25,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_database.import-example to = stackitprivatepreview_postgresflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}" id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
} }
import {
to = stackitprivatepreview_postgresflexalpha_database.import-example
identity = {
project_id = "project_id"
region = "region"
instance_id = "instance_id"
database_id = "database_id"
}
}
``` ```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->

View file

@ -13,7 +13,7 @@ description: |-
## Example Usage ## Example Usage
```terraform ```terraform
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
name = "example-instance" name = "example-instance"
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
@ -59,7 +59,7 @@ import {
### Required ### Required
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule. - `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
- `flavor_id` (String) The id of the instance flavor. - `flavor_id` (String) The id of the instance flavor.
- `name` (String) The name of the instance. - `name` (String) The name of the instance.
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network)) - `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
@ -80,7 +80,7 @@ import {
### Read-Only ### Read-Only
- `acl` (List of String) List of IPV4 cidr. - `acl` (List of String) List of IPV4 cidr.
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info)) - `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
- `id` (String) The ID of the instance. - `id` (String) The ID of the instance.
- `is_deletable` (Boolean) Whether the instance can be deleted or not. - `is_deletable` (Boolean) Whether the instance can be deleted or not.
- `status` (String) The current status of the instance. - `status` (String) The current status of the instance.
@ -127,12 +127,5 @@ Required:
Read-Only: Read-Only:
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
<a id="nestedatt--connection_info--write"></a>
### Nested Schema for `connection_info.write`
Read-Only:
- `host` (String) The host of the instance. - `host` (String) The host of the instance.
- `port` (Number) The port of the instance. - `port` (Number) The port of the instance.

View file

@ -16,7 +16,7 @@ description: |-
resource "stackitprivatepreview_postgresflexalpha_user" "example" { resource "stackitprivatepreview_postgresflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
name = "username" username = "username"
roles = ["role"] roles = ["role"]
} }
@ -25,16 +25,6 @@ import {
to = stackitprivatepreview_postgresflexalpha_user.import-example to = stackitprivatepreview_postgresflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}" id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
} }
import {
to = stackitprivatepreview_postgresflexalpha_user.import-example
identity = {
project_id = "project.id"
region = "region"
instance_id = "instance.id"
user_id = "user.id"
}
}
``` ```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->
@ -54,6 +44,7 @@ import {
### Read-Only ### Read-Only
- `connection_string` (String) The connection string for the user to the instance.
- `id` (Number) The ID of the user. - `id` (Number) The ID of the user.
- `password` (String) The password for the user. - `password` (String) The password for the user.
- `status` (String) The current status of the user. - `status` (String) The current status of the user.

View file

@ -10,34 +10,7 @@ description: |-
## Example Usage
```terraform
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
collation = ""
compatibility = "160"
name = ""
owner = ""
}
# Only use the import statement, if you want to import a existing sqlserverflex database
import {
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
import {
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
identity = {
project_id = "project.id"
region = "region"
instance_id = "instance.id"
database_id = "database.id"
}
}
```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->
## Schema ## Schema

View file

@ -32,7 +32,7 @@ import {
### Required ### Required
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint. - `roles` (List of String) A list containing the user roles for the instance.
- `username` (String) The name of the user. - `username` (String) The name of the user.
### Optional ### Optional

View file

@ -10,22 +10,7 @@ description: |-
## Example Usage
```terraform
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
username = "username"
roles = ["role"]
}
# Only use the import statement, if you want to import an existing sqlserverflex user
import {
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
```
<!-- schema generated by tfplugindocs --> <!-- schema generated by tfplugindocs -->
## Schema ## Schema

View file

@ -1,53 +0,0 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
subcategory: ""
description: |-
---
# stackitprivatepreview_sqlserverflexbeta_user (Resource)
## Example Usage
```terraform
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
username = "username"
roles = ["role"]
}
# Only use the import statement, if you want to import an existing sqlserverflex user
import {
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
```
<!-- schema generated by tfplugindocs -->
## Schema
### Required
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
- `username` (String) The name of the user.
### Optional
- `default_database` (String) The default database for a user of the instance.
- `instance_id` (String) The ID of the instance.
- `project_id` (String) The STACKIT project ID.
- `region` (String) The region which should be addressed
- `user_id` (Number) The ID of the user.
### Read-Only
- `host` (String) The host of the instance in which the user belongs to.
- `id` (Number) The ID of the user.
- `password` (String) The password for the user.
- `port` (Number) The port of the instance in which the user belongs to.
- `status` (String) The current status of the user.
- `uri` (String) The connection string for the user to the instance.

View file

@ -10,13 +10,3 @@ import {
to = stackitprivatepreview_postgresflexalpha_database.import-example to = stackitprivatepreview_postgresflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}" id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
} }
import {
to = stackitprivatepreview_postgresflexalpha_database.import-example
identity = {
project_id = "project_id"
region = "region"
instance_id = "instance_id"
database_id = "database_id"
}
}

View file

@ -1,4 +1,4 @@
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" { resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
name = "example-instance" name = "example-instance"
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"] acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]

View file

@ -10,13 +10,3 @@ import {
to = stackitprivatepreview_postgresflexalpha_user.import-example to = stackitprivatepreview_postgresflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}" id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
} }
import {
to = stackitprivatepreview_postgresflexalpha_user.import-example
identity = {
project_id = "project.id"
region = "region"
instance_id = "instance.id"
user_id = "user.id"
}
}

View file

@ -1,24 +0,0 @@
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
collation = ""
compatibility = "160"
name = ""
owner = ""
}
# Only use the import statement, if you want to import an existing sqlserverflex database
import {
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}
import {
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
identity = {
project_id = "project.id"
region = "region"
instance_id = "instance.id"
database_id = "database.id"
}
}

View file

@ -1,12 +0,0 @@
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
username = "username"
roles = ["role"]
}
# Only use the import statement, if you want to import an existing sqlserverflex user
import {
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}

View file

@ -1,12 +0,0 @@
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
username = "username"
roles = ["role"]
}
# Only use the import statement, if you want to import an existing sqlserverflex user
import {
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
}

View file

@ -1,346 +0,0 @@
package build
import (
"errors"
"fmt"
"go/ast"
"go/parser"
"go/token"
"log/slog"
"os"
"os/exec"
"path"
"regexp"
"strings"
)
// Builder drives the code-generation pipeline: it locates the repository
// root, runs the OpenAPI-based generators over the service specs, and can
// scaffold missing boilerplate files.
type Builder struct {
	rootDir string // repository root, resolved via `git rev-parse --show-toplevel`

	SkipClone    bool // reuse an existing spec-repository clone instead of cloning
	SkipCleanup  bool // keep temporary and generated directories after the run
	PackagesOnly bool // only generate packages; skip the command availability check
	Verbose      bool // emit additional progress logging
	Debug        bool // NOTE(review): not read anywhere in the visible code — confirm use
}
// Build runs the full generation pipeline: resolve the repository root,
// verify required external commands (unless PackagesOnly), and process the
// OpenAPI specs under service_specs. Returns the first error encountered.
// The commented-out sections are parked earlier pipeline steps.
func (b *Builder) Build() error {
	slog.Info("Starting Builder")
	if b.PackagesOnly {
		slog.Info(" >>> only generating pkg_gen <<<")
	}
	// Resolve b.rootDir before doing anything path-relative.
	rootErr := b.determineRoot()
	if rootErr != nil {
		return rootErr
	}
	if !b.PackagesOnly {
		if b.Verbose {
			slog.Info(" ... Checking needed commands available")
		}
		// NOTE(review): the command list is empty, so this check is currently a no-op.
		chkErr := checkCommands([]string{})
		if chkErr != nil {
			return chkErr
		}
	}
	// if !b.SkipCleanup {
	// 	slog.Info("Cleaning up old packages directory")
	// 	err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
	// 	if err != nil {
	// 		return err
	// 	}
	// }
	//
	// if !b.SkipCleanup && !b.PackagesOnly {
	// 	slog.Info("Cleaning up old packages directory")
	// 	err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
	// 	if err != nil {
	// 		return err
	// 	}
	// }
	// slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
	// genDir := path.Join(*root, GEN_REPO_NAME)
	// if !b.SkipClone {
	// 	err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
	// 	if err != nil {
	// 		return err
	// 	}
	// }
	// Generate schema/spec files from the OpenAPI documents.
	oasHandlerErr := b.oasHandler(path.Join(b.rootDir, "service_specs"))
	if oasHandlerErr != nil {
		return oasHandlerErr
	}
	// if !b.PackagesOnly {
	// 	slog.Info("Generating service boilerplate")
	// 	err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
	// 	if err != nil {
	// 		return err
	// 	}
	//
	// 	slog.Info("Copying all service files")
	// 	err = CopyDirectory(
	// 		path.Join(*root, "generated", "internal", "services"),
	// 		path.Join(*root, "stackit", "internal", "services"),
	// 	)
	// 	if err != nil {
	// 		return err
	// 	}
	//
	// 	err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
	// 	if err != nil {
	// 		return err
	// 	}
	// }
	// workaround to remove linter complain :D
	if b.PackagesOnly && b.Verbose && b.SkipClone && b.SkipCleanup {
		bpErr := createBoilerplate(b.rootDir, "boilerplate")
		if bpErr != nil {
			return bpErr
		}
	}
	slog.Info("Done")
	return nil
}
// templateData is the rendering context for the scaffold templates in
// cmd/cmd/build/templates (see writeTemplateToFile / createBoilerplate).
type templateData struct {
	PackageName       string   // service package name, e.g. "postgresflexalpha"
	PackageNameCamel  string   // camelCase form of PackageName
	PackageNamePascal string   // PascalCase form of PackageName
	NameCamel         string   // camelCase resource name
	NamePascal        string   // PascalCase resource name
	NameSnake         string   // snake_case resource name (directory name)
	Fields            []string // model field names extracted via getTokens
}
// createBoilerplate walks the generated services under folder and scaffolds
// any missing hand-written companion files (datasource.go, resource.go,
// functions.go) next to the *_gen packages, rendering the Go templates in
// cmd/cmd/build/templates with the model fields parsed from the generated
// code. rootFolder is the repository root; folder is the services tree.
func createBoilerplate(rootFolder, folder string) error {
	services, err := os.ReadDir(folder)
	if err != nil {
		return err
	}
	for _, svc := range services {
		if !svc.IsDir() {
			continue
		}
		resources, err := os.ReadDir(path.Join(folder, svc.Name()))
		if err != nil {
			return err
		}
		// handle*: the generated file exists; found*: the manual file exists.
		var handleDS bool
		var handleRes bool
		var foundDS bool
		var foundRes bool
		for _, res := range resources {
			if !res.IsDir() {
				continue
			}
			resourceName := res.Name()
			// Generated data-source schema produced by tfplugingen-framework.
			dsFile := path.Join(
				folder,
				svc.Name(),
				res.Name(),
				"datasources_gen",
				fmt.Sprintf("%s_data_source_gen.go", res.Name()),
			)
			handleDS = FileExists(dsFile)
			// Generated resource schema.
			resFile := path.Join(
				folder,
				svc.Name(),
				res.Name(),
				"resources_gen",
				fmt.Sprintf("%s_resource_gen.go", res.Name()),
			)
			handleRes = FileExists(resFile)
			// Hand-written counterparts that may need scaffolding.
			dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
			foundDS = FileExists(dsGoFile)
			resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
			foundRes = FileExists(resGoFile)
			if handleDS && !foundDS {
				slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
				if !ValidateSnakeCase(resourceName) {
					return errors.New("resource name is invalid")
				}
				// Field names from the generated model feed the template.
				fields, tokenErr := getTokens(dsFile)
				if tokenErr != nil {
					return fmt.Errorf("error reading tokens: %w", tokenErr)
				}
				tplName := "data_source_scaffold.gotmpl"
				err = writeTemplateToFile(
					tplName,
					path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
					dsGoFile,
					&templateData{
						PackageName:       svc.Name(),
						PackageNameCamel:  ToCamelCase(svc.Name()),
						PackageNamePascal: ToPascalCase(svc.Name()),
						NameCamel:         ToCamelCase(resourceName),
						NamePascal:        ToPascalCase(resourceName),
						NameSnake:         resourceName,
						Fields:            fields,
					},
				)
				if err != nil {
					// NOTE(review): this branch panics while the resource branch
					// below returns the error — confirm the asymmetry is intended.
					panic(err)
				}
			}
			if handleRes && !foundRes {
				slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
				if !ValidateSnakeCase(resourceName) {
					return errors.New("resource name is invalid")
				}
				fields, tokenErr := getTokens(resFile)
				if tokenErr != nil {
					return fmt.Errorf("error reading tokens: %w", tokenErr)
				}
				tplName := "resource_scaffold.gotmpl"
				err = writeTemplateToFile(
					tplName,
					path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
					resGoFile,
					&templateData{
						PackageName:       svc.Name(),
						PackageNameCamel:  ToCamelCase(svc.Name()),
						PackageNamePascal: ToPascalCase(svc.Name()),
						NameCamel:         ToCamelCase(resourceName),
						NamePascal:        ToPascalCase(resourceName),
						NameSnake:         resourceName,
						Fields:            fields,
					},
				)
				if err != nil {
					return err
				}
				// functions.go is only scaffolded together with resource.go.
				if !FileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
					slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
					if !ValidateSnakeCase(resourceName) {
						return errors.New("resource name is invalid")
					}
					fncTplName := "functions_scaffold.gotmpl"
					err = writeTemplateToFile(
						fncTplName,
						path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
						path.Join(folder, svc.Name(), res.Name(), "functions.go"),
						&templateData{
							PackageName:       svc.Name(),
							PackageNameCamel:  ToCamelCase(svc.Name()),
							PackageNamePascal: ToPascalCase(svc.Name()),
							NameCamel:         ToCamelCase(resourceName),
							NamePascal:        ToPascalCase(resourceName),
							NameSnake:         resourceName,
						},
					)
					if err != nil {
						return err
					}
				}
			}
		}
	}
	return nil
}
// handleLine rewrites one generated source line so that a schema attribute
// or model tfsdk tag named "id" becomes "tf_original_api_id". Lines matching
// neither pattern are returned unchanged. The error result is always nil and
// exists only to satisfy the caller's contract.
func handleLine(line string) (string, error) {
	patterns := []*regexp.Regexp{
		// `"id": schema.XxxAttribute{` in the generated schema map
		regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`),
		// `Id types.Xxx `tfsdk:"id"`` in the generated model struct
		regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`),
	}
	for _, re := range patterns {
		if m := re.FindStringSubmatch(line); m != nil {
			return fmt.Sprintf("%stf_original_api_id%s", m[1], m[3]), nil
		}
	}
	return line, nil
}
// determineRoot resolves the repository root via
// `git rev-parse --show-toplevel` and stores it in b.rootDir.
// Fails when git errors or prints an empty first line.
func (b *Builder) determineRoot() error {
	cmd := exec.Command("git", "rev-parse", "--show-toplevel")
	out, err := cmd.Output()
	if err != nil {
		return err
	}
	// git output ends with a newline; the first line is the root path.
	lines := strings.Split(string(out), "\n")
	if lines[0] == "" {
		return fmt.Errorf("unable to determine root directory from git")
	}
	b.rootDir = lines[0]
	if b.Verbose {
		slog.Info(" ... using root", "dir", b.rootDir)
	}
	return nil
}
// func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
// if !skipClone {
// if FileExists(targetDir) {
// remErr := os.RemoveAll(targetDir)
// if remErr != nil {
// return remErr
// }
// }
// _, cloneErr := git.Clone(
// clone.Repository(repoUrl),
// clone.Directory(targetDir),
// )
// if cloneErr != nil {
// return cloneErr
// }
// }
// return nil
//}
func getTokens(fileName string) ([]string, error) {
fset := token.NewFileSet()
var result []string
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
if err != nil {
return nil, err
}
ast.Inspect(
node, func(n ast.Node) bool {
// Suche nach Typ-Deklarationen (structs)
ts, ok := n.(*ast.TypeSpec)
if ok {
if strings.Contains(ts.Name.Name, "Model") {
ast.Inspect(
ts, func(sn ast.Node) bool {
tts, tok := sn.(*ast.Field)
if tok {
result = append(result, tts.Names[0].String())
}
return true
},
)
}
}
return true
},
)
return result, nil
}

View file

@ -1,120 +0,0 @@
package build
import (
"fmt"
"log/slog"
"os"
"os/exec"
"strings"
"text/template"
)
func FileExists(pathValue string) bool {
_, err := os.Stat(pathValue)
if os.IsNotExist(err) {
return false
}
if err != nil {
panic(err)
}
return true
}
// ucfirst upper-cases the first byte of s; an empty string stays empty.
// It operates byte-wise (s[:1]), so a leading multi-byte rune is left
// untouched — same behavior as the original implementation.
func ucfirst(s string) string {
	if len(s) == 0 {
		return s
	}
	head, tail := s[:1], s[1:]
	return strings.ToUpper(head) + tail
}
// writeTemplateToFile renders the template tplName (loaded from tplFile,
// with the "ucfirst" helper available) into outFile using data.
//
// Fix over the previous version: the output file is now closed on the
// render-failure path as well — before, a failed Execute returned without
// closing f, leaking the file handle. A close error is reported only when
// rendering itself succeeded, preserving the original error precedence.
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
	fn := template.FuncMap{
		"ucfirst": ucfirst,
	}
	tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
	if err != nil {
		return err
	}
	f, err := os.Create(outFile)
	if err != nil {
		return err
	}
	if execErr := tmpl.Execute(f, *data); execErr != nil {
		_ = f.Close() // best effort: don't leak the handle on render failure
		return execErr
	}
	return f.Close()
}
/* saved for later
func deleteFiles(fNames ...string) error {
for _, fName := range fNames {
if _, err := os.Stat(fName); !os.IsNotExist(err) {
err = os.Remove(fName)
if err != nil {
return err
}
}
}
return nil
}
func copyFile(src, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer func(source *os.File) {
err := source.Close()
if err != nil {
slog.Error("copyFile", "err", err)
}
}(source)
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer func(destination *os.File) {
err := destination.Close()
if err != nil {
slog.Error("copyFile", "err", err)
}
}(destination)
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
*/
// checkCommands verifies that every command in commands resolves on the
// PATH, logging each one found; it returns an error naming the first
// missing command. An empty slice always succeeds.
func checkCommands(commands []string) error {
	for idx := range commands {
		name := commands[idx]
		if commandExists(name) {
			slog.Info(" found", "command", name)
			continue
		}
		return fmt.Errorf("missing command %s", name)
	}
	return nil
}
// commandExists reports whether cmd resolves to an executable on the PATH.
func commandExists(cmd string) bool {
	if _, lookErr := exec.LookPath(cmd); lookErr != nil {
		return false
	}
	return true
}

View file

@ -1,446 +0,0 @@
package build
import (
"bufio"
"bytes"
"errors"
"fmt"
"log"
"log/slog"
"os"
"os/exec"
"path"
"regexp"
"strings"
"gopkg.in/yaml.v3"
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
"github.com/ldez/go-git-cmd-wrapper/v2/git"
)
// Upstream OpenAPI specification repository and the two generator output
// flavors accepted by runTerraformPluginGenFramework.
const (
	OasRepoName = "stackit-api-specifications" // checkout directory name
	OasRepo     = "https://github.com/stackitcloud/stackit-api-specifications.git"

	ResTypeResource   = "resources"   // generate Terraform resources
	ResTypeDataSource = "datasources" // generate Terraform data sources
)
// Data holds the generator settings for one service: the service name and
// the API versions to generate for it (read from generator_settings.yml).
type Data struct {
	// ServiceName is filled in by the caller, not read from YAML.
	ServiceName string    `yaml:",omitempty" json:",omitempty"`
	Versions    []Version `yaml:"versions" json:"versions"`
}

// Version maps an API version name (e.g. "alpha") to its path inside the
// cloned OAS specification repository.
type Version struct {
	Name string `yaml:"name" json:"name"`
	Path string `yaml:"path" json:"path"`
}
// oasTempDir is the process-wide path of the temporary OAS repository clone;
// set by createRepoDir, read by generateServiceFiles, removed by oasHandler.
var oasTempDir string
// oasHandler generates schema files for every service spec in specDir:
// it clones the upstream OAS repository, processes each service, and
// (unless SkipCleanup) removes the temporary clone and the "generated"
// scratch directory afterwards.
func (b *Builder) oasHandler(specDir string) error {
	if b.Verbose {
		slog.Info("creating schema files", "dir", specDir)
	}
	if _, err := os.Stat(specDir); os.IsNotExist(err) {
		return fmt.Errorf("spec files directory does not exist")
	}
	err := b.createRepoDir(b.SkipClone)
	if err != nil {
		return fmt.Errorf("%s", err.Error())
	}
	err2 := b.handleServices(specDir)
	if err2 != nil {
		return err2
	}
	if !b.SkipCleanup {
		if b.Verbose {
			slog.Info("Finally removing temporary files and directories")
		}
		err := os.RemoveAll(path.Join(b.rootDir, "generated"))
		if err != nil {
			slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
			return err
		}
		err = os.RemoveAll(oasTempDir)
		if err != nil {
			slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
			return err
		}
	}
	return nil
}
// handleServices iterates over the per-service directories in specDir,
// reads each service's generator_settings.yml, and triggers file
// generation for every configured version. Stops on the first error.
func (b *Builder) handleServices(specDir string) error {
	services, err := os.ReadDir(specDir)
	if err != nil {
		return err
	}
	for _, svc := range services {
		if !svc.IsDir() {
			continue
		}
		if b.Verbose {
			slog.Info(" ... found", "service", svc.Name())
		}
		var svcVersions Data
		// The directory name doubles as the service name.
		svcVersions.ServiceName = svc.Name()
		versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
		if versionsErr != nil {
			return versionsErr
		}
		oasSpecErr := b.generateServiceFiles(&svcVersions)
		if oasSpecErr != nil {
			return oasSpecErr
		}
	}
	return nil
}
// getServiceVersions loads the generator settings YAML at confFile and
// unmarshals it into data. It returns an error when the file is missing,
// unreadable, or not valid YAML.
func (b *Builder) getServiceVersions(confFile string, data *Data) error {
	if _, statErr := os.Stat(confFile); os.IsNotExist(statErr) {
		return fmt.Errorf("config file does not exist")
	}
	raw, readErr := os.ReadFile(confFile)
	if readErr != nil {
		return readErr
	}
	return yaml.Unmarshal(raw, &data)
}
// createRepoDir creates a fresh temporary directory and, unless skipClone
// is set, clones the upstream OAS specification repository into it. The
// resulting checkout path is stored in the package-level oasTempDir.
func (b *Builder) createRepoDir(skipClone bool) error {
	tmpDirName, err := os.MkdirTemp("", "oasbuild")
	if err != nil {
		return err
	}
	oasTempDir = path.Join(tmpDirName, OasRepoName)
	slog.Info("Creating oas repo dir", "dir", oasTempDir)
	if !skipClone {
		// NOTE(review): oasTempDir lives inside a brand-new temp directory, so
		// this existence check seemingly can never trigger — confirm intent.
		if FileExists(oasTempDir) {
			slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
			return nil
		}
		out, cloneErr := git.Clone(
			clone.Repository(OasRepo),
			clone.Directory(oasTempDir),
		)
		if cloneErr != nil {
			slog.Error("git clone error", "output", out)
			return cloneErr
		}
		if b.Verbose {
			slog.Info("git clone result", "output", out)
		}
	}
	return nil
}
// generateServiceFiles renders, for every configured version of the service
// described by data, the provider spec JSON (via tfplugingen-openapi) and
// the generated resource and data-source Go code (via
// tfplugingen-framework). Spec configs are files named <resource>_config.yml
// under service_specs/<service>/<version>/.
func (b *Builder) generateServiceFiles(data *Data) error {
	err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
	if err != nil {
		return err
	}
	for _, v := range data.Versions {
		specFiles, specsErr := os.ReadDir(path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name))
		if specsErr != nil {
			return specsErr
		}
		for _, specFile := range specFiles {
			if specFile.IsDir() {
				continue
			}
			// Only <resource>_config.yml files are generator configs.
			r := regexp.MustCompile(`^(.*)_config.yml$`)
			matches := r.FindAllStringSubmatch(specFile.Name(), -1)
			if matches == nil {
				slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
				continue
			}
			srcSpecFile := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name, specFile.Name())
			// Sanity check: the anchored regex must have matched the whole name.
			if matches[0][0] != specFile.Name() {
				return fmt.Errorf("matched filename differs from original filename - this should not happen")
			}
			resource := matches[0][1]
			if b.Verbose {
				slog.Info(
					" found service spec",
					"service",
					data.ServiceName,
					"resource",
					resource,
					"file",
					specFile.Name(),
				)
			}
			// Matching OpenAPI document inside the cloned spec repository.
			oasFile := path.Join(
				oasTempDir,
				"services",
				data.ServiceName,
				v.Path,
				fmt.Sprintf("%s.json", data.ServiceName),
			)
			if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
				slog.Warn(
					" could not find matching oas",
					"svc",
					data.ServiceName,
					"version",
					v.Name,
				)
				continue
			}
			// determine correct target service name: <service><version>, dashes stripped
			scName := fmt.Sprintf("%s%s", data.ServiceName, v.Name)
			scName = strings.ReplaceAll(scName, "-", "")
			specJSONFile := path.Join(
				b.rootDir,
				"generated",
				"specs",
				fmt.Sprintf("%s_%s_spec.json", scName, resource),
			)
			cmdErr := b.runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile)
			if cmdErr != nil {
				return cmdErr
			}
			cmdResGenErr := b.runTerraformPluginGenFramework(ResTypeResource, scName, resource, specJSONFile)
			if cmdResGenErr != nil {
				return cmdResGenErr
			}
			cmdDsGenErr := b.runTerraformPluginGenFramework(ResTypeDataSource, scName, resource, specJSONFile)
			if cmdDsGenErr != nil {
				return cmdDsGenErr
			}
		}
	}
	return nil
}
// runTerraformPluginGenFramework invokes tfplugingen-framework to generate
// either resource or data-source Go code (per resType) for the given
// service/resource from the provider spec JSON, writing into the service's
// *_gen package directory. For data sources it additionally rewrites the
// generated "id" attribute (see handleTfTagForDatasourceFile).
func (b *Builder) runTerraformPluginGenFramework(resType, svcName, resource, specJSONFile string) error {
	var stdOut, stdErr bytes.Buffer
	tgtFolder := path.Join(
		b.rootDir,
		"stackit",
		"internal",
		"services",
		svcName,
		resource,
		fmt.Sprintf("%s_gen", resType),
	)
	//nolint:gosec // this file is not sensitive, so we can use 0755
	err := os.MkdirAll(tgtFolder, 0o755)
	if err != nil {
		return err
	}
	// Map the resType constant to the generator's subcommand name.
	var subCmd string
	switch resType {
	case ResTypeResource:
		subCmd = "resources"
	case ResTypeDataSource:
		subCmd = "data-sources"
	default:
		return fmt.Errorf("unknown resource type given: %s", resType)
	}
	// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
	cmd := exec.Command(
		"tfplugingen-framework",
		"generate",
		subCmd,
		"--input",
		specJSONFile,
		"--output",
		tgtFolder,
		"--package",
		svcName,
	)
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err = cmd.Start(); err != nil {
		slog.Error(fmt.Sprintf("tfplugingen-framework generate %s", resType), "error", err)
		return err
	}
	if err = cmd.Wait(); err != nil {
		// A non-zero exit surfaces the tool's stderr as the returned error.
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			slog.Error(
				fmt.Sprintf("tfplugingen-framework generate %s", resType),
				"code",
				exitErr.ExitCode(),
				"error",
				err,
				"stdout",
				stdOut.String(),
				"stderr",
				stdErr.String(),
			)
			return fmt.Errorf("%s", stdErr.String())
		}
		// NOTE(review): err is always non-nil inside this branch, so this
		// inner nil check is redundant — confirm before simplifying.
		if err != nil {
			slog.Error(
				fmt.Sprintf("tfplugingen-framework generate %s", resType),
				"err",
				err,
				"stdout",
				stdOut.String(),
				"stderr",
				stdErr.String(),
			)
			return err
		}
	}
	if resType == ResTypeDataSource {
		tfAnoErr := b.handleTfTagForDatasourceFile(
			path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
			svcName,
			resource,
		)
		if tfAnoErr != nil {
			return tfAnoErr
		}
	}
	return nil
}
// runTerraformPluginGenOpenAPI invokes tfplugingen-openapi to convert the
// OpenAPI document oasFile, guided by the generator config srcSpecFile,
// into the provider spec JSON at specJSONFile. Tool output is captured and
// logged; on a non-zero exit the tool's stderr becomes the returned error.
func (b *Builder) runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile string) error {
	var stdOut, stdErr bytes.Buffer
	// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
	cmd := exec.Command(
		"tfplugingen-openapi",
		"generate",
		"--config",
		srcSpecFile,
		"--output",
		specJSONFile,
		oasFile,
	)
	cmd.Stdout = &stdOut
	cmd.Stderr = &stdErr
	if err := cmd.Start(); err != nil {
		slog.Error(
			"tfplugingen-openapi generate",
			"error",
			err,
			"stdOut",
			stdOut.String(),
			"stdErr",
			stdErr.String(),
		)
		return err
	}
	if err := cmd.Wait(); err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			slog.Error(
				"tfplugingen-openapi generate",
				"code",
				exitErr.ExitCode(),
				"error",
				err,
				"stdout",
				stdOut.String(),
				"stderr",
				stdErr.String(),
			)
			return fmt.Errorf("%s", stdErr.String())
		}
		// NOTE(review): err is always non-nil here, so this inner check is
		// redundant — confirm before simplifying.
		if err != nil {
			slog.Error(
				"tfplugingen-openapi generate",
				"err",
				err,
				"stdout",
				stdOut.String(),
				"stderr",
				stdErr.String(),
			)
			return err
		}
	}
	// Surface any non-error tool chatter so it is not silently lost.
	if stdOut.Len() > 0 {
		slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
	}
	return nil
}
// handleTfTagForDatasourceFile replaces existing "id" with "tf_original_api_id"
// in the generated data-source file at filePath (see handleLine for the exact
// patterns), rewriting via a temp file that replaces the original on success.
// A missing file is skipped with a warning rather than treated as an error.
func (b *Builder) handleTfTagForDatasourceFile(filePath, service, resource string) error {
	if b.Verbose {
		slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
	}
	if !FileExists(filePath) {
		slog.Warn(" could not find file, skipping", "path", filePath)
		return nil
	}
	f, err := os.Open(filePath)
	if err != nil {
		return err
	}
	tmp, err := os.CreateTemp(b.rootDir, "replace-*")
	if err != nil {
		return err
	}
	// Line-wise copy with id-tag rewriting; note this re-appends "\n" to each
	// line, which normalizes any CRLF endings and adds a trailing newline.
	sc := bufio.NewScanner(f)
	for sc.Scan() {
		resLine, err := handleLine(sc.Text())
		if err != nil {
			return err
		}
		if _, err := tmp.WriteString(resLine + "\n"); err != nil {
			return err
		}
	}
	if scErr := sc.Err(); scErr != nil {
		return scErr
	}
	if err := tmp.Close(); err != nil {
		return err
	}
	if err := f.Close(); err != nil {
		return err
	}
	//nolint:gosec // path traversal is not a concern here
	if err := os.Rename(tmp.Name(), filePath); err != nil {
		// NOTE(review): log.Fatal terminates the whole process instead of
		// returning the error this signature promises; early returns above
		// also leak f/tmp handles and the temp file — consider refactoring.
		log.Fatal(err)
	}
	return nil
}

View file

@ -1,114 +0,0 @@
package cmd
import (
"fmt"
"os"
"path"
"github.com/spf13/cobra"
)
// examplesCmd is the "examples" subcommand. It currently only walks the
// services tree (workServices); the commented-out block below is a parked
// yaegi-interpreter experiment for evaluating generated schema files.
var examplesCmd = &cobra.Command{
	Use:   "examples",
	Short: "create examples",
	Long:  `...`,
	RunE: func(_ *cobra.Command, _ []string) error {
		// filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
		//
		// src, err := os.ReadFile(filePathStr)
		// if err != nil {
		// 	return err
		// }
		//
		// i := interp.New(
		// 	interp.Options{
		// 		GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages",
		// 		BuildTags: nil,
		// 		Stdin: nil,
		// 		Stdout: nil,
		// 		Stderr: nil,
		// 		Args: nil,
		// 		Env: nil,
		// 		SourcecodeFilesystem: nil,
		// 		Unrestricted: false,
		// 	},
		// )
		// err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators"))
		// if err != nil {
		// 	return err
		// }
		// err = i.Use(stdlib.Symbols)
		// if err != nil {
		// 	return err
		// }
		// _, err = i.Eval(string(src))
		// if err != nil {
		// 	return err
		// }
		//
		// v, err := i.Eval("DatabaseDataSourceSchema")
		// if err != nil {
		// 	return err
		// }
		//
		// bar := v.Interface().(func(string) string)
		//
		// r := bar("Kung")
		// println(r)
		//
		// evalPath, err := i.EvalPath(filePathStr)
		// if err != nil {
		// 	return err
		// }
		//
		// fmt.Printf("%+v\n", evalPath)
		// _, err = i.Eval(`import "fmt"`)
		// if err != nil {
		// 	return err
		// }
		// _, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`)
		// if err != nil {
		// 	return err
		// }
		// v = i.Symbols("Hallo")
		// fmt.Println(v)
		return workServices()
	},
}
// workServices walks stackit/internal/services (relative to the working
// directory) and prints every service/resource directory pair it finds.
// NOTE(review): the printed "Gefunden:" is German for "found"; left
// unchanged here because it is runtime output.
func workServices() error {
	startPath := path.Join("stackit", "internal", "services")
	services, err := os.ReadDir(startPath)
	if err != nil {
		return err
	}
	for _, entry := range services {
		if !entry.IsDir() {
			continue
		}
		resources, err := os.ReadDir(path.Join(startPath, entry.Name()))
		if err != nil {
			return err
		}
		for _, res := range resources {
			if !res.IsDir() {
				continue
			}
			fmt.Println("Gefunden:", startPath, "subdir", entry.Name(), "resource", res.Name())
		}
	}
	return nil
}
// NewExamplesCmd returns the package-level examples command for
// registration with the root command. Note it returns the shared
// examplesCmd instance, not a fresh copy.
func NewExamplesCmd() *cobra.Command {
	return examplesCmd
}
// func init() { // nolint: gochecknoinits
// examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example")
//}

View file

@ -1,38 +0,0 @@
{
	# Global options: debug logging plus an S3-backed virtual filesystem
	# named "tf" pointing at the provider bucket in STACKIT object storage.
	log {
		level debug
	}
	filesystem tf s3 {
		bucket "terraform-provider-privatepreview"
		region eu01
		endpoint https://object.storage.eu01.onstackit.cloud
		use_path_style
	}
}

# Registry site: /docs/* serves markdown rendered through markdown.html;
# everything else is served straight from the S3 filesystem "tf".
tfregistry.sysops.stackit.rocks {
	encode zstd gzip
	handle_path /docs/* {
		root /srv/www
		templates
		# Match requests whose path resolves to an existing *.md file.
		@md {
			file {path}
			path *.md
		}
		rewrite @md /markdown.html
		file_server {
			browse
		}
	}
	file_server {
		fs tf
		browse
	}
}

257
go.mod
View file

@ -3,286 +3,85 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
go 1.25.6 go 1.25.6
require ( require (
github.com/SladkyCitron/slogcolor v1.8.0 github.com/MatusOllah/slogcolor v1.7.0
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/go-cmp v0.7.0 github.com/google/go-cmp v0.7.0
github.com/google/uuid v1.6.0 github.com/google/uuid v1.6.0
github.com/hashicorp/terraform-plugin-framework v1.18.0 github.com/hashicorp/terraform-plugin-framework v1.17.0
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
github.com/hashicorp/terraform-plugin-go v0.30.0 github.com/hashicorp/terraform-plugin-go v0.29.0
github.com/hashicorp/terraform-plugin-log v0.10.0 github.com/hashicorp/terraform-plugin-log v0.10.0
github.com/hashicorp/terraform-plugin-testing v1.14.0 github.com/hashicorp/terraform-plugin-testing v1.14.0
github.com/iancoleman/strcase v0.3.0 github.com/iancoleman/strcase v0.3.0
github.com/ivanpirog/coloredcobra v1.0.1 github.com/ivanpirog/coloredcobra v1.0.1
github.com/jarcoal/httpmock v1.4.1
github.com/joho/godotenv v1.5.1 github.com/joho/godotenv v1.5.1
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1 github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
github.com/spf13/cobra v1.10.2 github.com/spf13/cobra v1.10.2
github.com/stackitcloud/stackit-sdk-go/core v0.22.0 github.com/stackitcloud/stackit-sdk-go/core v0.21.0
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0 github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0 github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.4.1
github.com/teambition/rrule-go v1.8.2 github.com/teambition/rrule-go v1.8.2
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
) )
require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect require (
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
golang.org/x/telemetry v0.0.0-20260116145544-c6413dc483f5 // indirect
)
require ( require (
4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
4d63.com/gochecknoglobals v0.2.2 // indirect
codeberg.org/chavacava/garif v0.2.0 // indirect
codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
dario.cat/mergo v1.0.1 // indirect dario.cat/mergo v1.0.1 // indirect
dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect
dev.gaijin.team/go/golib v0.6.0 // indirect
github.com/4meepo/tagalign v1.4.3 // indirect
github.com/Abirdcfly/dupword v0.1.7 // indirect
github.com/AdminBenni/iota-mixing v1.0.0 // indirect
github.com/AlwxSin/noinlineerr v1.0.5 // indirect
github.com/Antonboom/errname v1.1.1 // indirect
github.com/Antonboom/nilnil v1.1.1 // indirect
github.com/Antonboom/testifylint v1.6.4 // indirect
github.com/BurntSushi/toml v1.6.0 // indirect
github.com/Djarvur/go-err113 v0.1.1 // indirect
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
github.com/Masterminds/goutils v1.1.1 // indirect
github.com/Masterminds/semver/v3 v3.4.0 // indirect
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/MirrexOne/unqueryvet v1.5.4 // indirect
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
github.com/ProtonMail/go-crypto v1.4.0 // indirect
github.com/agext/levenshtein v1.2.3 // indirect github.com/agext/levenshtein v1.2.3 // indirect
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
github.com/alexkohler/prealloc v1.1.0 // indirect
github.com/alfatraining/structtag v1.0.0 // indirect
github.com/alingse/asasalint v0.0.11 // indirect
github.com/alingse/nilnesserr v0.2.0 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/armon/go-radix v1.0.0 // indirect github.com/cloudflare/circl v1.6.2 // indirect
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bgentry/speakeasy v0.1.0 // indirect
github.com/bkielbasa/cyclop v1.2.3 // indirect
github.com/blizzy78/varnamelen v0.8.0 // indirect
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
github.com/bombsimon/wsl/v4 v4.7.0 // indirect
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
github.com/breml/bidichk v0.3.3 // indirect
github.com/breml/errchkjson v0.4.1 // indirect
github.com/butuzov/ireturn v0.4.0 // indirect
github.com/butuzov/mirror v1.3.0 // indirect
github.com/catenacyber/perfsprint v0.10.1 // indirect
github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charithe/durationcheck v0.0.11 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/lipgloss v1.1.0 // indirect
github.com/charmbracelet/x/ansi v0.10.1 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/ckaznocha/intrange v0.3.1 // indirect
github.com/cloudflare/circl v1.6.3 // indirect
github.com/curioswitch/go-reassign v0.3.0 // indirect
github.com/daixiang0/gci v0.13.7 // indirect
github.com/dave/dst v0.27.3 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/denis-tingaikin/go-header v0.5.0 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/ettle/strcase v0.2.0 // indirect
github.com/fatih/color v1.18.0 // indirect github.com/fatih/color v1.18.0 // indirect
github.com/fatih/structtag v1.2.0 // indirect github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
github.com/firefart/nonamedreturns v1.0.6 // indirect
github.com/fsnotify/fsnotify v1.5.4 // indirect
github.com/fzipp/gocyclo v0.6.0 // indirect
github.com/ghostiam/protogetter v0.3.20 // indirect
github.com/go-critic/go-critic v0.14.3 // indirect
github.com/go-toolsmith/astcast v1.1.0 // indirect
github.com/go-toolsmith/astcopy v1.1.0 // indirect
github.com/go-toolsmith/astequal v1.2.0 // indirect
github.com/go-toolsmith/astfmt v1.1.0 // indirect
github.com/go-toolsmith/astp v1.1.0 // indirect
github.com/go-toolsmith/strparse v1.1.0 // indirect
github.com/go-toolsmith/typep v1.1.0 // indirect
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/godoc-lint/godoc-lint v0.11.2 // indirect
github.com/gofrs/flock v0.13.0 // indirect
github.com/golang/protobuf v1.5.4 // indirect github.com/golang/protobuf v1.5.4 // indirect
github.com/golangci/asciicheck v0.5.0 // indirect
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
github.com/golangci/go-printf-func-name v0.1.1 // indirect
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
github.com/golangci/golangci-lint/v2 v2.11.2 // indirect
github.com/golangci/golines v0.15.0 // indirect
github.com/golangci/misspell v0.8.0 // indirect
github.com/golangci/plugin-module-register v0.1.2 // indirect
github.com/golangci/revgrep v0.8.0 // indirect
github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
github.com/gordonklaus/ineffassign v0.2.0 // indirect
github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
github.com/gostaticanalysis/comment v1.5.0 // indirect
github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
github.com/gostaticanalysis/nilerr v0.1.2 // indirect
github.com/hashicorp/cli v1.1.7 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-cty v1.5.0 // indirect github.com/hashicorp/go-cty v1.5.0 // indirect
github.com/hashicorp/go-hclog v1.6.3 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect
github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-plugin v1.7.0 // indirect github.com/hashicorp/go-plugin v1.7.0 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.8.0 // indirect github.com/hashicorp/go-version v1.8.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hc-install v0.9.2 // indirect
github.com/hashicorp/hc-install v0.9.3 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hashicorp/hcl/v2 v2.24.0 // indirect github.com/hashicorp/hcl/v2 v2.24.0 // indirect
github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/logutils v1.0.0 // indirect
github.com/hashicorp/terraform-exec v0.25.0 // indirect github.com/hashicorp/terraform-exec v0.24.0 // indirect
github.com/hashicorp/terraform-json v0.27.2 // indirect github.com/hashicorp/terraform-json v0.27.2 // indirect
github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.1 // indirect
github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 // indirect
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
github.com/hashicorp/terraform-svchost v0.2.1 // indirect github.com/hashicorp/terraform-svchost v0.2.0 // indirect
github.com/hashicorp/yamux v0.1.2 // indirect github.com/hashicorp/yamux v0.1.2 // indirect
github.com/hexops/gotextdiff v1.0.3 // indirect
github.com/huandu/xstrings v1.3.3 // indirect
github.com/imdario/mergo v0.3.15 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jgautheron/goconst v1.8.2 // indirect
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
github.com/jjti/go-spancheck v0.6.5 // indirect
github.com/julz/importas v0.2.0 // indirect
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
github.com/kisielk/errcheck v1.10.0 // indirect
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
github.com/kr/text v0.2.0 // indirect github.com/kr/text v0.2.0 // indirect
github.com/kulti/thelper v0.7.1 // indirect
github.com/kunwardeep/paralleltest v1.0.15 // indirect
github.com/lasiar/canonicalheader v1.1.2 // indirect
github.com/ldez/exptostd v0.4.5 // indirect
github.com/ldez/gomoddirectives v0.8.0 // indirect
github.com/ldez/grignotin v0.10.1 // indirect
github.com/ldez/structtags v0.6.1 // indirect
github.com/ldez/tagliatelle v0.7.2 // indirect
github.com/ldez/usetesting v0.5.0 // indirect
github.com/leonklingele/grouper v1.1.2 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/macabu/inamedparam v0.2.0 // indirect
github.com/magiconair/properties v1.8.6 // indirect
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
github.com/manuelarte/funcorder v0.5.0 // indirect
github.com/maratori/testableexamples v1.0.1 // indirect
github.com/maratori/testpackage v1.1.2 // indirect
github.com/matoous/godox v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/mgechev/revive v1.15.0 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/moricho/tparallel v0.3.2 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/nakabonne/nestif v0.3.1 // indirect
github.com/nishanths/exhaustive v0.12.0 // indirect
github.com/nishanths/predeclared v0.2.2 // indirect
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
github.com/oklog/run v1.2.0 // indirect github.com/oklog/run v1.2.0 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/posener/complete v1.2.3 // indirect
github.com/prometheus/client_golang v1.12.1 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/quasilyte/go-ruleguard v0.4.5 // indirect
github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
github.com/quasilyte/gogrep v0.5.0 // indirect
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
github.com/raeperd/recvcheck v0.2.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/ryancurrah/gomodguard v1.4.1 // indirect
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
github.com/securego/gosec/v2 v2.24.7 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/sirupsen/logrus v1.9.4 // indirect
github.com/sivchari/containedctx v1.0.3 // indirect
github.com/sonatard/noctx v0.5.0 // indirect
github.com/sourcegraph/go-diff v0.7.0 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect github.com/spf13/pflag v1.0.10 // indirect
github.com/spf13/viper v1.12.0 // indirect
github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.11.1 // indirect github.com/stretchr/testify v1.11.1 // indirect
github.com/subosito/gotenv v1.4.1 // indirect
github.com/tetafro/godot v1.5.4 // indirect
github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
github.com/timonwong/loggercheck v0.11.0 // indirect
github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
github.com/ultraware/funlen v0.2.0 // indirect
github.com/ultraware/whitespace v0.2.0 // indirect
github.com/uudashr/gocognit v1.2.1 // indirect
github.com/uudashr/iface v1.4.1 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
github.com/xen0n/gosmopolitan v1.3.0 // indirect github.com/zclconf/go-cty v1.17.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect golang.org/x/crypto v0.47.0 // indirect
github.com/yagipy/maintidx v1.0.0 // indirect golang.org/x/mod v0.32.0 // indirect
github.com/yeya24/promlinter v0.3.0 // indirect golang.org/x/net v0.49.0 // indirect
github.com/ykadowak/zerologlint v0.1.5 // indirect
github.com/yuin/goldmark v1.7.7 // indirect
github.com/yuin/goldmark-meta v1.1.0 // indirect
github.com/zclconf/go-cty v1.18.0 // indirect
gitlab.com/bosi/decorder v0.4.2 // indirect
go-simpler.org/musttag v0.14.0 // indirect
go-simpler.org/sloglint v0.11.1 // indirect
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
go.augendre.info/arangolint v0.4.0 // indirect
go.augendre.info/fatcontext v0.9.0 // indirect
go.uber.org/multierr v1.10.0 // indirect
go.uber.org/zap v1.27.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/crypto v0.48.0 // indirect
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
golang.org/x/mod v0.33.0 // indirect
golang.org/x/net v0.51.0 // indirect
golang.org/x/sync v0.19.0 // indirect golang.org/x/sync v0.19.0 // indirect
golang.org/x/sys v0.41.0 // indirect golang.org/x/sys v0.40.0 // indirect
golang.org/x/text v0.34.0 // indirect golang.org/x/text v0.33.0 // indirect
golang.org/x/tools v0.42.0 // indirect golang.org/x/tools v0.41.0 // indirect
google.golang.org/appengine v1.6.8 // indirect google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20260120221211-b8f7ae30c516 // indirect
google.golang.org/grpc v1.79.2 // indirect google.golang.org/grpc v1.78.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
honnef.co/go/tools v0.7.0 // indirect
mvdan.cc/gofumpt v0.9.2 // indirect
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
) )
tool golang.org/x/tools/cmd/goimports

1028
go.sum

File diff suppressed because it is too large Load diff

View file

@ -1,13 +1,7 @@
version: "2" version: "2"
run: run:
concurrency: 4 concurrency: 4
output:
formats:
text:
print-linter-name: true
print-issued-lines: true
colors: true
path: stdout
linters: linters:
enable: enable:
- bodyclose - bodyclose
@ -30,11 +24,6 @@ linters:
rules: rules:
main: main:
list-mode: lax list-mode: lax
allow:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
- github.com/hashicorp/terraform-plugin-framework
- github.com/hashicorp/terraform-plugin-log
- github.com/stackitcloud/stackit-sdk-go
deny: deny:
- pkg: github.com/stretchr/testify - pkg: github.com/stretchr/testify
desc: Do not use a testing framework desc: Do not use a testing framework
@ -74,21 +63,24 @@ linters:
- name: empty-lines - name: empty-lines
- name: early-return - name: early-return
exclusions: exclusions:
paths:
- generator/
generated: lax generated: lax
warn-unused: true paths:
# Excluding configuration per-path, per-linter, per-text and per-source. - third_party$
rules: - builtin$
# Exclude some linters from running on tests files. - examples$
- path: _test\.go - tools/copy.go
linters: - tools/main.go
- gochecknoinits
formatters: formatters:
enable: enable:
#- gofmt - gofmt
- goimports - goimports
settings: settings:
goimports: goimports:
local-prefixes: local-prefixes:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview - github.com/freiheit-com/nmww
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$

View file

@ -1,97 +0,0 @@
version: "2"
run:
concurrency: 4
output:
formats:
text:
print-linter-name: true
print-issued-lines: true
colors: true
path: stdout
linters:
enable:
- bodyclose
- depguard
- errorlint
- forcetypeassert
- gochecknoinits
- gocritic
- gosec
- misspell
- nakedret
- revive
- sqlclosecheck
- wastedassign
disable:
- noctx
- unparam
settings:
depguard:
rules:
main:
list-mode: lax
allow:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
- github.com/hashicorp/terraform-plugin-framework
- github.com/hashicorp/terraform-plugin-log
- github.com/stackitcloud/stackit-sdk-go
deny:
- pkg: github.com/stretchr/testify
desc: Do not use a testing framework
gocritic:
disabled-checks:
- wrapperFunc
- typeDefFirst
- ifElseChain
- dupImport
- hugeParam
enabled-tags:
- performance
- style
- experimental
gosec:
excludes:
- G104
- G102
- G304
- G307
misspell:
locale: US
nakedret:
max-func-lines: 0
revive:
severity: error
rules:
- name: errorf
- name: context-as-argument
- name: error-return
- name: increment-decrement
- name: indent-error-flow
- name: superfluous-else
- name: unused-parameter
- name: unreachable-code
- name: atomic
- name: empty-lines
- name: early-return
exclusions:
paths:
- stackit-sdk-generator/
- generated/
- pkg_gen/
generated: lax
warn-unused: true
# Excluding configuration per-path, per-linter, per-text and per-source.
rules:
# Exclude some linters from running on tests files.
- path: _test\.go
linters:
- gochecknoinits
formatters:
enable:
- gofmt
- goimports
settings:
goimports:
local-prefixes:
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview

View file

@ -0,0 +1,11 @@
package testutil
import "testing"
// Equal fails the running test when got differs from expected (compared
// with !=). V must be comparable. t.Helper() makes the failure report point
// at the caller's line rather than this helper.
func Equal[V comparable](t *testing.T, got, expected V) {
	t.Helper()
	if got != expected {
		t.Errorf("assert equal failed:\ngot: %v \nexpected: %v", got, expected)
	}
}

View file

@ -0,0 +1,651 @@
package testutil
import (
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/hashicorp/terraform-plugin-framework/providerserver"
"github.com/hashicorp/terraform-plugin-go/tfprotov6"
"github.com/hashicorp/terraform-plugin-testing/config"
"github.com/hashicorp/terraform-plugin-testing/echoprovider"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
)
const (
// Default location of credentials JSON
// credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive
// serviceAccountFilePath is the relative path (presumably resolved against
// the user's home directory by the consumer — TODO confirm) where the
// service-account key JSON is expected.
serviceAccountFilePath = ".stackit/service_account.json"
)
// Environment-driven acceptance-test configuration. All values are read once
// at package initialization, so TF_ACC_* variables must be set before the
// test binary starts.
var (
	// TestAccProtoV6ProviderFactories is used to instantiate a provider during
	// acceptance testing. The factory function will be invoked for every Terraform
	// CLI command executed to create a provider server to which the CLI can
	// reattach.
	TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
		"stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
	}

	// TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during
	// acceptance testing. The factory function will be invoked for every Terraform
	// CLI command executed to create a provider server to which the CLI can
	// reattach.
	//
	// See the Terraform acceptance test documentation on ephemeral resources for more information:
	// https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources
	TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
		"stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
		"echo":                  echoprovider.NewProviderServer(),
	}

	// E2ETestsEnabled checks if end-to-end tests should be run.
	// It is enabled when the TF_ACC environment variable is set to "1".
	E2ETestsEnabled = os.Getenv("TF_ACC") == "1"

	// OrganizationId is the id of organization used for tests
	OrganizationId = os.Getenv("TF_ACC_ORGANIZATION_ID")
	// ProjectId is the id of project used for tests
	ProjectId = os.Getenv("TF_ACC_PROJECT_ID")
	// Region is the STACKIT region used for tests
	Region = os.Getenv("TF_ACC_REGION")
	// ServiceAccountFile is the json file of the service account
	ServiceAccountFile = os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")
	// ServerId is the id of a server used for some tests
	ServerId = getenv("TF_ACC_SERVER_ID", "")
	// TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests
	TestProjectParentContainerID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID")
	// TestProjectParentUUID is the uuid of the parent resource under which projects are created as part of the resource-manager acceptance tests
	TestProjectParentUUID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_UUID")
	// TestProjectServiceAccountEmail is the e-mail of a service account with admin permissions on the organization under which projects are created as part of the resource-manager acceptance tests
	TestProjectServiceAccountEmail = os.Getenv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL")
	// TestProjectUserEmail is the e-mail of a user for the project created as part of the resource-manager acceptance tests
	// Default email: acc-test@sa.stackit.cloud
	TestProjectUserEmail = getenv("TF_ACC_TEST_PROJECT_USER_EMAIL", "acc-test@sa.stackit.cloud")
	// TestImageLocalFilePath is the local path to an image file used for image acceptance tests
	TestImageLocalFilePath = getenv("TF_ACC_TEST_IMAGE_LOCAL_FILE_PATH", "default")

	// Per-service custom endpoint overrides; empty means "use the default endpoint".
	CdnCustomEndpoint          = os.Getenv("TF_ACC_CDN_CUSTOM_ENDPOINT")
	DnsCustomEndpoint          = os.Getenv("TF_ACC_DNS_CUSTOM_ENDPOINT")
	GitCustomEndpoint          = os.Getenv("TF_ACC_GIT_CUSTOM_ENDPOINT")
	IaaSCustomEndpoint         = os.Getenv("TF_ACC_IAAS_CUSTOM_ENDPOINT")
	KMSCustomEndpoint          = os.Getenv("TF_ACC_KMS_CUSTOM_ENDPOINT")
	LoadBalancerCustomEndpoint = os.Getenv("TF_ACC_LOADBALANCER_CUSTOM_ENDPOINT")
	LogMeCustomEndpoint        = os.Getenv("TF_ACC_LOGME_CUSTOM_ENDPOINT")
	MariaDBCustomEndpoint      = os.Getenv("TF_ACC_MARIADB_CUSTOM_ENDPOINT")
	ModelServingCustomEndpoint = os.Getenv("TF_ACC_MODELSERVING_CUSTOM_ENDPOINT")
	// Fix: every other setting uses an upper-case TF_ACC_* name; the
	// historical lower-case spelling is still honored as a fallback so
	// existing environments keep working.
	AuthorizationCustomEndpoint   = getenv("TF_ACC_AUTHORIZATION_CUSTOM_ENDPOINT", os.Getenv("TF_ACC_authorization_custom_endpoint"))
	MongoDBFlexCustomEndpoint     = os.Getenv("TF_ACC_MONGODBFLEX_CUSTOM_ENDPOINT")
	OpenSearchCustomEndpoint      = os.Getenv("TF_ACC_OPENSEARCH_CUSTOM_ENDPOINT")
	ObservabilityCustomEndpoint   = os.Getenv("TF_ACC_OBSERVABILITY_CUSTOM_ENDPOINT")
	ObjectStorageCustomEndpoint   = os.Getenv("TF_ACC_OBJECTSTORAGE_CUSTOM_ENDPOINT")
	PostgresFlexCustomEndpoint    = os.Getenv("TF_ACC_POSTGRESFLEX_CUSTOM_ENDPOINT")
	RabbitMQCustomEndpoint        = os.Getenv("TF_ACC_RABBITMQ_CUSTOM_ENDPOINT")
	RedisCustomEndpoint           = os.Getenv("TF_ACC_REDIS_CUSTOM_ENDPOINT")
	ResourceManagerCustomEndpoint = os.Getenv("TF_ACC_RESOURCEMANAGER_CUSTOM_ENDPOINT")
	ScfCustomEndpoint             = os.Getenv("TF_ACC_SCF_CUSTOM_ENDPOINT")
	SecretsManagerCustomEndpoint  = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT")
	SQLServerFlexCustomEndpoint   = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT")
	ServerBackupCustomEndpoint    = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT")
	ServerUpdateCustomEndpoint    = os.Getenv("TF_ACC_SERVER_UPDATE_CUSTOM_ENDPOINT")
	ServiceAccountCustomEndpoint  = os.Getenv("TF_ACC_SERVICE_ACCOUNT_CUSTOM_ENDPOINT")
	SKECustomEndpoint             = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT")
)
// Provider config helper functions
// ObservabilityProviderConfig renders the provider block for Observability
// acceptance tests; TF_ACC_OBSERVABILITY_CUSTOM_ENDPOINT, when set,
// redirects Observability API calls.
func ObservabilityProviderConfig() string {
	if ObservabilityCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
observability_custom_endpoint = "%s"
}`, ObservabilityCustomEndpoint)
	}
	return `provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// CdnProviderConfig renders the provider block for CDN acceptance tests.
// Beta resources are always enabled; TF_ACC_CDN_CUSTOM_ENDPOINT, when set,
// redirects CDN API calls.
func CdnProviderConfig() string {
	if CdnCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
cdn_custom_endpoint = "%s"
enable_beta_resources = true
}`, CdnCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
enable_beta_resources = true
}`
}
// DnsProviderConfig renders the provider block for DNS acceptance tests;
// TF_ACC_DNS_CUSTOM_ENDPOINT, when set, redirects DNS API calls.
func DnsProviderConfig() string {
	if DnsCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
dns_custom_endpoint = "%s"
}`, DnsCustomEndpoint)
	}
	return `provider "stackitprivatepreview" {}`
}
// IaaSProviderConfig renders the provider block for IaaS acceptance tests;
// TF_ACC_IAAS_CUSTOM_ENDPOINT, when set, redirects IaaS API calls.
func IaaSProviderConfig() string {
	if IaaSCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
iaas_custom_endpoint = "%s"
}`, IaaSCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// IaaSProviderConfigWithBetaResourcesEnabled is IaaSProviderConfig with
// enable_beta_resources = true added to both variants.
func IaaSProviderConfigWithBetaResourcesEnabled() string {
	if IaaSCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
enable_beta_resources = true
iaas_custom_endpoint = "%s"
}`, IaaSCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
enable_beta_resources = true
default_region = "eu01"
}`
}
// IaaSProviderConfigWithExperiments is IaaSProviderConfig with the
// "routing-tables" and "network" experiments enabled in both variants.
func IaaSProviderConfigWithExperiments() string {
	if IaaSCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
iaas_custom_endpoint = "%s"
experiments = [ "routing-tables", "network" ]
}`, IaaSCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
experiments = [ "routing-tables", "network" ]
}`
}
// KMSProviderConfig renders the provider block for KMS acceptance tests;
// TF_ACC_KMS_CUSTOM_ENDPOINT, when set, redirects KMS API calls.
func KMSProviderConfig() string {
	if KMSCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
kms_custom_endpoint = "%s"
}`, KMSCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// LoadBalancerProviderConfig renders the provider block for Load Balancer
// acceptance tests; TF_ACC_LOADBALANCER_CUSTOM_ENDPOINT, when set, redirects
// Load Balancer API calls.
func LoadBalancerProviderConfig() string {
	if LoadBalancerCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
loadbalancer_custom_endpoint = "%s"
}`, LoadBalancerCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// LogMeProviderConfig renders the provider block for LogMe acceptance tests;
// TF_ACC_LOGME_CUSTOM_ENDPOINT, when set, redirects LogMe API calls.
func LogMeProviderConfig() string {
	if LogMeCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
logme_custom_endpoint = "%s"
}`, LogMeCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// MariaDBProviderConfig renders the provider block for MariaDB acceptance
// tests; TF_ACC_MARIADB_CUSTOM_ENDPOINT, when set, redirects MariaDB API calls.
func MariaDBProviderConfig() string {
	if MariaDBCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
mariadb_custom_endpoint = "%s"
}`, MariaDBCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// ModelServingProviderConfig renders the provider block for Model Serving
// acceptance tests; TF_ACC_MODELSERVING_CUSTOM_ENDPOINT, when set, redirects
// Model Serving API calls.
func ModelServingProviderConfig() string {
	if ModelServingCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
modelserving_custom_endpoint = "%s"
}`, ModelServingCustomEndpoint)
	}
	// NOTE: unlike its siblings, the default variant ends with a trailing
	// newline; kept verbatim.
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}
`
}
// MongoDBFlexProviderConfig renders the provider block for MongoDB Flex
// acceptance tests; TF_ACC_MONGODBFLEX_CUSTOM_ENDPOINT, when set, redirects
// MongoDB Flex API calls.
func MongoDBFlexProviderConfig() string {
	if MongoDBFlexCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
mongodbflex_custom_endpoint = "%s"
}`, MongoDBFlexCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// ObjectStorageProviderConfig renders the provider block for Object Storage
// acceptance tests; TF_ACC_OBJECTSTORAGE_CUSTOM_ENDPOINT, when set, redirects
// Object Storage API calls.
func ObjectStorageProviderConfig() string {
	if ObjectStorageCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
objectstorage_custom_endpoint = "%s"
}`, ObjectStorageCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// OpenSearchProviderConfig renders the provider block for OpenSearch
// acceptance tests; TF_ACC_OPENSEARCH_CUSTOM_ENDPOINT, when set, redirects
// OpenSearch API calls.
func OpenSearchProviderConfig() string {
	if OpenSearchCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
opensearch_custom_endpoint = "%s"
}`, OpenSearchCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// PostgresFlexProviderConfig renders the provider block for Postgres Flex
// acceptance tests. saFile is interpolated into service_account_key_path;
// TF_ACC_POSTGRESFLEX_CUSTOM_ENDPOINT, when set, redirects Postgres Flex
// API calls.
func PostgresFlexProviderConfig(saFile string) string {
	if PostgresFlexCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
service_account_key_path = "%s"
postgresflex_custom_endpoint = "%s"
}`, saFile, PostgresFlexCustomEndpoint)
	}
	return fmt.Sprintf(`
provider "stackitprivatepreview" {
default_region = "eu01"
service_account_key_path = "%s"
}`, saFile)
}
// RabbitMQProviderConfig renders the provider block for RabbitMQ acceptance
// tests; TF_ACC_RABBITMQ_CUSTOM_ENDPOINT, when set, redirects RabbitMQ API calls.
func RabbitMQProviderConfig() string {
	if RabbitMQCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
rabbitmq_custom_endpoint = "%s"
}`, RabbitMQCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// RedisProviderConfig renders the provider block for Redis acceptance tests;
// TF_ACC_REDIS_CUSTOM_ENDPOINT, when set, redirects Redis API calls.
func RedisProviderConfig() string {
	if RedisCustomEndpoint != "" {
		return fmt.Sprintf(`
provider "stackitprivatepreview" {
redis_custom_endpoint = "%s"
}`, RedisCustomEndpoint)
	}
	return `
provider "stackitprivatepreview" {
default_region = "eu01"
}`
}
// ResourceManagerProviderConfig renders the provider block for resource-manager
// acceptance tests, authenticating with the test project's service-account key.
// Both custom endpoints must be set for the redirect variant to be used;
// if either is missing the plain key-auth variant is returned.
// NOTE(review): the fallback branch sets service_account_key while the
// custom-endpoint branch sets service_account_token with the same JSON
// value — confirm the token attribute is intended there.
// NOTE(review): the key JSON is interpolated into an HCL string via %s;
// presumably GetTestProjectServiceAccountJson returns an escaped
// single-line value, otherwise quotes/newlines would break the config — verify.
func ResourceManagerProviderConfig() string {
key := GetTestProjectServiceAccountJson("")
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
return fmt.Sprintf(`
provider "stackitprivatepreview" {
service_account_key = "%s"
}`,
key,
)
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
resourcemanager_custom_endpoint = "%s"
authorization_custom_endpoint = "%s"
service_account_token = "%s"
}`,
ResourceManagerCustomEndpoint,
AuthorizationCustomEndpoint,
key,
)
}
// SecretsManagerProviderConfig returns the HCL provider block used by Secrets
// Manager acceptance tests: the default eu01 config, or one pointing at
// SecretsManagerCustomEndpoint when that package variable is set.
func SecretsManagerProviderConfig() string {
if SecretsManagerCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	secretsmanager_custom_endpoint = "%s"
}`,
SecretsManagerCustomEndpoint,
)
}
// SQLServerFlexProviderConfig returns the HCL provider block used by SQLServer
// Flex acceptance tests. saFile is the path to the service-account key file
// and is embedded as service_account_key_path in both branches; the custom
// endpoint is added only when SQLServerFlexCustomEndpoint is set.
func SQLServerFlexProviderConfig(saFile string) string {
if SQLServerFlexCustomEndpoint == "" {
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	default_region = "eu01"
	service_account_key_path = "%s"
}`, saFile)
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	service_account_key_path = "%s"
	sqlserverflex_custom_endpoint = "%s"
}`,
saFile,
SQLServerFlexCustomEndpoint,
)
}
// ServerBackupProviderConfig returns the HCL provider block used by server
// backup acceptance tests. Beta resources are enabled in both branches; the
// custom endpoint is used only when ServerBackupCustomEndpoint is set.
func ServerBackupProviderConfig() string {
if ServerBackupCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
	enable_beta_resources = true
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	server_backup_custom_endpoint = "%s"
	enable_beta_resources = true
}`,
ServerBackupCustomEndpoint,
)
}
// ServerUpdateProviderConfig returns the HCL provider block used by server
// update acceptance tests. Beta resources are enabled in both branches; the
// custom endpoint is used only when ServerUpdateCustomEndpoint is set.
func ServerUpdateProviderConfig() string {
if ServerUpdateCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
	enable_beta_resources = true
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	server_update_custom_endpoint = "%s"
	enable_beta_resources = true
}`,
ServerUpdateCustomEndpoint,
)
}
// SKEProviderConfig returns the HCL provider block used by SKE (Kubernetes
// Engine) acceptance tests: the default eu01 config, or one pointing at
// SKECustomEndpoint when that package variable is set.
func SKEProviderConfig() string {
if SKECustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	ske_custom_endpoint = "%s"
}`,
SKECustomEndpoint,
)
}
// AuthorizationProviderConfig returns the HCL provider block used by
// authorization acceptance tests. The "iam" experiment is enabled in both
// branches; the custom endpoint is used only when AuthorizationCustomEndpoint
// is set.
func AuthorizationProviderConfig() string {
if AuthorizationCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
	experiments = ["iam"]
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	authorization_custom_endpoint = "%s"
	experiments = ["iam"]
}`,
AuthorizationCustomEndpoint,
)
}
// ServiceAccountProviderConfig returns the HCL provider block used by service
// account acceptance tests. Beta resources are enabled in both branches; the
// custom endpoint is used only when ServiceAccountCustomEndpoint is set.
func ServiceAccountProviderConfig() string {
if ServiceAccountCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
	enable_beta_resources = true
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	service_account_custom_endpoint = "%s"
	enable_beta_resources = true
}`,
ServiceAccountCustomEndpoint,
)
}
// GitProviderConfig returns the HCL provider block used by Git service
// acceptance tests. Beta resources are enabled in both branches; the custom
// endpoint is used only when GitCustomEndpoint is set.
func GitProviderConfig() string {
if GitCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
	enable_beta_resources = true
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	git_custom_endpoint = "%s"
	enable_beta_resources = true
}`,
GitCustomEndpoint,
)
}
// ScfProviderConfig returns the HCL provider block used by SCF acceptance
// tests: the default eu01 config, or one pointing at ScfCustomEndpoint when
// that package variable is set.
// NOTE(review): unlike the sibling *ProviderConfig helpers, the custom
// endpoint branch here also sets default_region — confirm whether that is
// deliberate or a copy-paste leftover.
func ScfProviderConfig() string {
if ScfCustomEndpoint == "" {
return `
provider "stackitprivatepreview" {
	default_region = "eu01"
}`
}
return fmt.Sprintf(`
provider "stackitprivatepreview" {
	default_region = "eu01"
	scf_custom_endpoint = "%s"
}`,
ScfCustomEndpoint,
)
}
func ResourceNameWithDateTime(name string) string {
dateTime := time.Now().Format(time.RFC3339)
// Remove timezone to have a smaller datetime
dateTimeTrimmed, _, _ := strings.Cut(dateTime, "+")
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
}
// GetTestProjectServiceAccountJson resolves the service-account JSON for the
// test project. The TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON environment
// variable wins; otherwise the JSON is read from the given file path (or the
// default location when path is empty). Returns "" when neither is available.
func GetTestProjectServiceAccountJson(path string) string {
	if envValue, ok := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON"); ok && envValue != "" {
		return envValue
	}
	fileValue, err := readTestServiceAccountJsonFromFile(path)
	if err != nil {
		return ""
	}
	return fileValue
}
//func GetTestProjectServiceAccountToken(path string) string {
// var err error
// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
// if !tokenSet || token == "" {
// token, err = readTestTokenFromCredentialsFile(path)
// if err != nil {
// return ""
// }
// }
// return token
//}
//
//func readTestTokenFromCredentialsFile(path string) (string, error) {
// if path == "" {
// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH")
// if !customPathSet || customPath == "" {
// path = credentialsFilePath
// home, err := os.UserHomeDir()
// if err != nil {
// return "", fmt.Errorf("getting home directory: %w", err)
// }
// path = filepath.Join(home, path)
// } else {
// path = customPath
// }
// }
//
// credentialsRaw, err := os.ReadFile(path)
// if err != nil {
// return "", fmt.Errorf("opening file: %w", err)
// }
//
// var credentials struct {
// TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"`
// }
// err = json.Unmarshal(credentialsRaw, &credentials)
// if err != nil {
// return "", fmt.Errorf("unmarshalling credentials: %w", err)
// }
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
//}
// readTestServiceAccountJsonFromFile reads the service-account JSON from the
// given path. When path is empty, STACKIT_SERVICE_ACCOUNT_PATH is used if
// set; otherwise the default serviceAccountFilePath under the user's home
// directory is used.
func readTestServiceAccountJsonFromFile(path string) (string, error) {
	resolved := path
	if resolved == "" {
		custom, ok := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH")
		if ok && custom != "" {
			resolved = custom
		} else {
			home, err := os.UserHomeDir()
			if err != nil {
				return "", fmt.Errorf("getting home directory: %w", err)
			}
			resolved = filepath.Join(home, serviceAccountFilePath)
		}
	}
	raw, err := os.ReadFile(resolved)
	if err != nil {
		return "", fmt.Errorf("opening file: %w", err)
	}
	return string(raw), nil
}
func getenv(key, defaultValue string) string {
val := os.Getenv(key)
if val == "" {
return defaultValue
}
return val
}
// CreateDefaultLocalFile is a helper for local_file_path. No real data is created
func CreateDefaultLocalFile() os.File {
// Define the file name and size
fileName := "test-512k.img"
size := 512 * 1024 // 512 KB
// Create the file
file, err := os.Create(fileName)
if err != nil {
panic(err)
}
// Seek to the desired position (512 KB)
_, err = file.Seek(int64(size), 0)
if err != nil {
panic(err)
}
return *file
}
// ConvertConfigVariable renders a terraform-plugin-testing config.Variable as
// a plain string suitable for embedding in HCL templates. String variables
// lose their surrounding JSON quotes and have escaped quotes restored; all
// other types are returned in their JSON form.
func ConvertConfigVariable(variable config.Variable) string {
	raw, err := variable.MarshalJSON()
	// Guard: the previous implementation ignored the error and indexed
	// raw[0], panicking on a nil/empty result from MarshalJSON.
	if err != nil || len(raw) == 0 {
		return ""
	}
	// In case the variable is a string, the surrounding quotes are removed.
	if len(raw) >= 2 && raw[0] == '"' && raw[len(raw)-1] == '"' {
		unquoted := string(raw[1 : len(raw)-1])
		// Restore quotes that MarshalJSON escaped.
		return strings.ReplaceAll(unquoted, `\"`, `"`)
	}
	return string(raw)
}

View file

@ -0,0 +1,50 @@
// Copyright (c) STACKIT
package testutil
import (
"testing"
"github.com/hashicorp/terraform-plugin-testing/config"
)
// TestConvertConfigVariable checks that config variables of different types
// are rendered as the plain strings expected inside HCL templates.
func TestConvertConfigVariable(t *testing.T) {
	cases := []struct {
		name     string
		variable config.Variable
		want     string
	}{
		{name: "string", variable: config.StringVariable("test"), want: "test"},
		{name: "bool: true", variable: config.BoolVariable(true), want: "true"},
		{name: "bool: false", variable: config.BoolVariable(false), want: "false"},
		{name: "integer", variable: config.IntegerVariable(10), want: "10"},
		{name: "quoted string", variable: config.StringVariable(`instance =~ ".*"`), want: `instance =~ ".*"`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := ConvertConfigVariable(tc.variable)
			if got != tc.want {
				t.Errorf("ConvertConfigVariable() = %v, want %v", got, tc.want)
			}
		})
	}
}

View file

@ -1,39 +0,0 @@
package testutils
import (
"fmt"
"net/http"
"path/filepath"
"regexp"
"runtime"
"strings"
"github.com/jarcoal/httpmock"
)
func TestName() string {
pc, _, _, _ := runtime.Caller(1)
nameFull := runtime.FuncForPC(pc).Name()
nameEnd := filepath.Ext(nameFull)
name := strings.TrimPrefix(nameEnd, ".")
return name
}
// ActivateEnvironmentHttpMocks registers the httpmock responders shared by
// environment tests: a catch-all that errors on any unmocked request, and a
// stub serving the canned languages fixture for the BAP
// environmentLanguages endpoint (europe and unitedstates locations).
func ActivateEnvironmentHttpMocks() {
// Fail loudly on any request with no registered responder so missing mocks
// surface as explicit errors instead of hitting the real network.
httpmock.RegisterNoResponder(
func(req *http.Request) (*http.Response, error) {
return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
},
)
// Serve the languages fixture file for GETs against either location.
httpmock.RegisterRegexpResponder(
"GET",
regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
func(_ *http.Request) (*http.Response, error) {
// Fixture path is relative to the test binary's working directory.
return httpmock.NewStringResponse(
http.StatusOK,
httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String(),
), nil
},
)
}

View file

@ -53,9 +53,9 @@ func CreateTemporaryHome(createValidCredentialsFile bool, t *testing.T) string {
// Define content, default = invalid token // Define content, default = invalid token
token := "foo_token" token := "foo_token"
//if createValidCredentialsFile { if createValidCredentialsFile {
// token = GetTestProjectServiceAccountJson("") token = GetTestProjectServiceAccountJson("")
//} }
if _, err = file.WriteString(token); err != nil { if _, err = file.WriteString(token); err != nil {
t.Fatalf("Error writing to file: %v", err) t.Fatalf("Error writing to file: %v", err)
} }
@ -88,7 +88,7 @@ func CleanupTemporaryHome(tempHomePath string, t *testing.T) {
} }
func ucFirst(s string) string { func ucFirst(s string) string {
if s == "" { if len(s) == 0 {
return "" return ""
} }
return strings.ToUpper(s[:1]) + s[1:] return strings.ToUpper(s[:1]) + s[1:]
@ -123,7 +123,3 @@ func StringFromTemplate(tplFile string, data any) (string, error) {
return tplBuf.String(), nil return tplBuf.String(), nil
} }
func ResStr(prefix, resource, name string) string {
return fmt.Sprintf("%s_%s.%s", prefix, resource, name)
}

View file

@ -293,24 +293,25 @@ func RedisProviderConfig() string {
) )
} }
func ResourceManagerProviderConfig(saKeyPath string) string { func ResourceManagerProviderConfig() string {
key := GetTestProjectServiceAccountJson("")
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" { if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
return fmt.Sprintf(` return fmt.Sprintf(`
provider "stackitprivatepreview" { provider "stackitprivatepreview" {
service_account_key_path = "%s" service_account_key = "%s"
}`, }`,
saKeyPath, key,
) )
} }
return fmt.Sprintf(` return fmt.Sprintf(`
provider "stackitprivatepreview" { provider "stackitprivatepreview" {
resourcemanager_custom_endpoint = "%s" resourcemanager_custom_endpoint = "%s"
authorization_custom_endpoint = "%s" authorization_custom_endpoint = "%s"
service_account_key_path = "%s" service_account_token = "%s"
}`, }`,
ResourceManagerCustomEndpoint, ResourceManagerCustomEndpoint,
AuthorizationCustomEndpoint, AuthorizationCustomEndpoint,
saKeyPath, key,
) )
} }

View file

@ -6,6 +6,7 @@ import (
"log/slog" "log/slog"
"os" "os"
"os/exec" "os/exec"
"path/filepath"
"strings" "strings"
"time" "time"
@ -19,8 +20,9 @@ import (
) )
const ( const (
// Default location of service account JSON // Default location of credentials JSON
serviceAccountFilePath = "service_account.json" // credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive
serviceAccountFilePath = ".stackit/service_account.json"
) )
var ( var (
@ -99,19 +101,19 @@ func ResourceNameWithDateTime(name string) string {
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed) return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
} }
//func GetTestProjectServiceAccountJson(path string) string { func GetTestProjectServiceAccountJson(path string) string {
// var err error var err error
// json, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_JSON_CONTENT") token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON")
// if !ok || json == "" { if !tokenSet || token == "" {
// json, err = readTestServiceAccountJsonFromFile(path) token, err = readTestServiceAccountJsonFromFile(path)
// if err != nil { if err != nil {
// return "" return ""
// } }
// } }
// return json return token
//} }
// func GetTestProjectServiceAccountToken(path string) string { //func GetTestProjectServiceAccountToken(path string) string {
// var err error // var err error
// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN") // token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
// if !tokenSet || token == "" { // if !tokenSet || token == "" {
@ -123,7 +125,7 @@ func ResourceNameWithDateTime(name string) string {
// return token // return token
//} //}
// //
// func readTestTokenFromCredentialsFile(path string) (string, error) { //func readTestTokenFromCredentialsFile(path string) (string, error) {
// if path == "" { // if path == "" {
// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH") // customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH")
// if !customPathSet || customPath == "" { // if !customPathSet || customPath == "" {
@ -153,30 +155,27 @@ func ResourceNameWithDateTime(name string) string {
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil // return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
//} //}
//func readTestServiceAccountJsonFromFile(path string) (string, error) { func readTestServiceAccountJsonFromFile(path string) (string, error) {
// if path == "" { if path == "" {
// customPath, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_FILE") customPath, customPathSet := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH")
// if !ok || customPath == "" { if !customPathSet || customPath == "" {
// path = serviceAccountFilePath path = serviceAccountFilePath
// // TODO: check if we want to handle this with a home dir home, err := os.UserHomeDir()
// /* if err != nil {
// home, err := os.UserHomeDir() return "", fmt.Errorf("getting home directory: %w", err)
// if err != nil { }
// return "", fmt.Errorf("getting home directory: %w", err) path = filepath.Join(home, path)
// } } else {
// path = filepath.Join(home, path) path = customPath
// */ }
// } else { }
// path = customPath
// } credentialsRaw, err := os.ReadFile(path)
// } if err != nil {
// return "", fmt.Errorf("opening file: %w", err)
// credentialsRaw, err := os.ReadFile(path) }
// if err != nil { return string(credentialsRaw), nil
// return "", fmt.Errorf("opening file: %w", err) }
// }
// return string(credentialsRaw), nil
//}
func getenv(key, defaultValue string) string { func getenv(key, defaultValue string) string {
val := os.Getenv(key) val := os.Getenv(key)

View file

@ -6,7 +6,6 @@ import (
"log" "log"
"github.com/hashicorp/terraform-plugin-framework/providerserver" "github.com/hashicorp/terraform-plugin-framework/providerserver"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
) )

View file

@ -65,15 +65,15 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "msh-sna-pe-example2
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser" { resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
name = var.db_admin_username username = var.db_admin_username
roles = ["createdb", "login", "login"] roles = ["createdb", "login"]
# roles = ["createdb", "login", "createrole"] # roles = ["createdb", "login", "createrole"]
} }
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" { resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example2.instance_id instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example2.instance_id
name = var.db_admin_username username = var.db_admin_username
roles = ["createdb", "login"] roles = ["createdb", "login"]
# roles = ["createdb", "login", "createrole"] # roles = ["createdb", "login", "createrole"]
} }
@ -81,7 +81,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" {
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbuser" { resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbuser" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
name = var.db_name username = var.db_username
roles = ["login"] roles = ["login"]
# roles = ["createdb", "login", "createrole"] # roles = ["createdb", "login", "createrole"]
} }

View file

@ -1,5 +1,5 @@
data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" { data "stackitprivatepreview_sqlserverflexalpha_flavor" "sqlserver_flavor" {
project_id = var.project_id project_id = var.project_id
region = "eu01" region = "eu01"
cpu = 4 cpu = 4
@ -9,5 +9,5 @@ data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" {
} }
output "sqlserver_flavor" { output "sqlserver_flavor" {
value = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id value = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
} }

View file

@ -18,15 +18,15 @@
# value = stackit_kms_key.key.key_id # value = stackit_kms_key.key.key_id
# } # }
resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" { resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-001" {
project_id = var.project_id project_id = var.project_id
name = "msh-beta-sna-001" name = "msh-sna-001"
backup_schedule = "0 3 * * *" backup_schedule = "0 3 * * *"
retention_days = 31 retention_days = 31
flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = { storage = {
class = "premium-perf2-stackit" class = "premium-perf2-stackit"
size = 10 size = 50
} }
version = 2022 version = 2022
encryption = { encryption = {
@ -34,11 +34,9 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
#keyring_id = stackit_kms_keyring.keyring.keyring_id #keyring_id = stackit_kms_keyring.keyring.keyring_id
#key_version = 1 #key_version = 1
# key with scope public # key with scope public
# kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b" kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
kek_key_id = "c6878f92-ce55-4b79-8236-ba9d001d7967" # msh-k-001
# key_id = var.key_id # key_id = var.key_id
# kek_key_ring_id = var.keyring_id kek_key_ring_id = var.keyring_id
kek_key_ring_id = "0dea3f5f-9947-4dda-a9d3-18418832cefe" # msh-kr-sna01
kek_key_version = var.key_version kek_key_version = var.key_version
service_account = var.sa_email service_account = var.sa_email
} }
@ -48,16 +46,83 @@ resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
} }
} }
resource "stackitprivatepreview_sqlserverflexbeta_user" "betauser" { resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-sna-101" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id name = "msh-sna-101"
username = "betauser" backup_schedule = "0 3 * * *"
roles = ["##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"] retention_days = 31
flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = {
class = "premium-perf2-stackit"
size = 50
}
version = 2022
encryption = {
#key_id = stackit_kms_key.key.key_id
#keyring_id = stackit_kms_keyring.keyring.keyring_id
#key_version = 1
# key with scope public
kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
# key_id = var.key_id
kek_key_ring_id = var.keyring_id
kek_key_version = var.key_version
service_account = var.sa_email
}
network = {
acl = ["0.0.0.0/0", "193.148.160.0/19"]
access_scope = "SNA"
}
} }
resource "stackitprivatepreview_sqlserverflexbeta_database" "betadb" { resource "stackitprivatepreview_sqlserverflexalpha_instance" "msh-nosna-001" {
project_id = var.project_id project_id = var.project_id
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id name = "msh-nosna-001"
name = "mshtest002" backup_schedule = "0 3 * * *"
owner = stackitprivatepreview_sqlserverflexbeta_user.betauser.username retention_days = 31
flavor_id = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
storage = {
class = "premium-perf2-stackit"
size = 50
}
version = 2022
# encryption = {
# #key_id = stackit_kms_key.key.key_id
# #keyring_id = stackit_kms_keyring.keyring.keyring_id
# #key_version = 1
# #key_id = var.key_id
# # key with scope public
# key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
# keyring_id = var.keyring_id
# key_version = var.key_version
# service_account = var.sa_email
# }
network = {
acl = ["0.0.0.0/0", "193.148.160.0/19"]
access_scope = "PUBLIC"
}
} }
# data "stackitprivatepreview_sqlserverflexalpha_instance" "test" {
# project_id = var.project_id
# instance_id = var.instance_id
# region = "eu01"
# }
# output "test" {
# value = data.stackitprivatepreview_sqlserverflexalpha_instance.test
# }
# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbadminuser" {
# project_id = var.project_id
# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id
# username = var.db_admin_username
# roles = ["##STACKIT_LoginManager##", "##STACKIT_DatabaseManager##"]
# }
# resource "stackitprivatepreview_sqlserverflexalpha_user" "ptlsdbuser" {
# project_id = var.project_id
# instance_id = stackitprivatepreview_sqlserverflexalpha_instance.sqlsrv.instance_id
# username = var.db_username
# roles = ["##STACKIT_LoginManager##"]
# }

19
scripts/lint-golangci-lint.sh Executable file
View file

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Lints the repository with golangci-lint using the repo-local config file.
# Pre-requisites: golangci-lint
# Exits non-zero when golangci-lint is missing or reports findings.
set -euo pipefail

ROOT_DIR=$(git rev-parse --show-toplevel)
GOLANG_CI_YAML_PATH="${ROOT_DIR}/golang-ci.yaml"
# Intentionally a plain string that is word-split below: every element is a
# single flag without embedded whitespace.
GOLANG_CI_ARGS="--allow-parallel-runners --timeout=5m --config=${GOLANG_CI_YAML_PATH}"

# Fail early with a clear message (to stderr) when the linter is missing;
# the previous form used an empty then-branch and printed to stdout.
if ! type -p golangci-lint >/dev/null; then
    echo "golangci-lint not installed, unable to proceed." >&2
    exit 1
fi

# Quote the path so checkouts under directories containing spaces work.
cd "${ROOT_DIR}"
golangci-lint run ${GOLANG_CI_ARGS}

View file

@ -17,7 +17,11 @@ elif [ "$action" = "tools" ]; then
go mod download go mod download
go install golang.org/x/tools/cmd/goimports@v0.42.0 # go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.0
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
# go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.21.0
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
else else
echo "Invalid action: '$action', please use $0 help for help" echo "Invalid action: '$action', please use $0 help for help"
fi fi

View file

@ -14,5 +14,5 @@ fi
mkdir -p ${ROOT_DIR}/docs mkdir -p ${ROOT_DIR}/docs
echo ">> Generating documentation" echo ">> Generating documentation"
go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate \ tfplugindocs generate \
--provider-name "stackitprivatepreview" --provider-name "stackitprivatepreview"

View file

@ -1,3 +0,0 @@
versions:
- name: alpha
path: v3alpha1

View file

@ -1,5 +0,0 @@
versions:
- name: alpha
path: v3alpha1
- name: beta
path: v3beta1

View file

@ -11,7 +11,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
) )

View file

@ -8,7 +8,6 @@ import (
"testing" "testing"
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"

View file

@ -32,7 +32,7 @@ const (
type EphemeralProviderData struct { type EphemeralProviderData struct {
ProviderData ProviderData
PrivateKey string //nolint:gosec //this is a placeholder and not used in this code PrivateKey string
PrivateKeyPath string PrivateKeyPath string
ServiceAccountKey string ServiceAccountKey string
ServiceAccountKeyPath string ServiceAccountKeyPath string
@ -105,13 +105,11 @@ func DiagsToError(diags diag.Diagnostics) error {
diagsError := diags.Errors() diagsError := diags.Errors()
diagsStrings := make([]string, 0) diagsStrings := make([]string, 0)
for _, diagnostic := range diagsError { for _, diagnostic := range diagsError {
diagsStrings = append( diagsStrings = append(diagsStrings, fmt.Sprintf(
diagsStrings, fmt.Sprintf(
"(%s) %s", "(%s) %s",
diagnostic.Summary(), diagnostic.Summary(),
diagnostic.Detail(), diagnostic.Detail(),
), ))
)
} }
return fmt.Errorf("%s", strings.Join(diagsStrings, ";")) return fmt.Errorf("%s", strings.Join(diagsStrings, ";"))
} }
@ -138,22 +136,14 @@ func LogAndAddWarning(ctx context.Context, diags *diag.Diagnostics, summary, det
func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) { func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name) warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name)
warnContent := fmt.Sprintf( warnContent := fmt.Sprintf("The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", resourceType, name)
"The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.",
resourceType,
name,
)
tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent)) tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent))
diags.AddWarning(warnTitle, warnContent) diags.AddWarning(warnTitle, warnContent)
} }
func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) { func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name) errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name)
errContent := fmt.Sprintf( errContent := fmt.Sprintf(`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, resourceType, name)
`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`,
resourceType,
name,
)
tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent)) tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent))
diags.AddError(errTitle, errContent) diags.AddError(errTitle, errContent)
} }
@ -171,10 +161,8 @@ func LogResponse(ctx context.Context) context.Context {
traceId := runtime.GetTraceId(ctx) traceId := runtime.GetTraceId(ctx)
ctx = tflog.SetField(ctx, "x-trace-id", traceId) ctx = tflog.SetField(ctx, "x-trace-id", traceId)
tflog.Info( tflog.Info(ctx, "response data", map[string]interface{}{
ctx, "response data", map[string]interface{}{
"x-trace-id": traceId, "x-trace-id": traceId,
}, })
)
return ctx return ctx
} }

View file

@ -1,237 +0,0 @@
package core
import (
"context"
"crypto/rand"
"errors"
"fmt"
"math/big"
"net/http"
"time"
"github.com/hashicorp/terraform-plugin-log/tflog"
)
const (
// backoffMultiplier is the factor by which the delay is multiplied for
// exponential backoff between retry attempts.
backoffMultiplier = 2
// jitterFactor is the divisor used to calculate the jitter bound from the
// base delay (e.g., half of the base delay).
jitterFactor = 2
)
var (
// ErrRequestFailedAfterRetries is returned when a request fails after all
// retry attempts. It is wrapped with the underlying cause where one is
// available, so callers should match it with errors.Is.
ErrRequestFailedAfterRetries = errors.New("request failed after all retry attempts")
)
// RetryRoundTripper implements an http.RoundTripper that adds automatic retry logic for failed requests.
type RetryRoundTripper struct {
next http.RoundTripper // wrapped transport that performs the actual request
maxRetries int // number of retry attempts after the initial request
initialDelay time.Duration // backoff delay before the first retry
maxDelay time.Duration // upper bound for the exponential backoff delay
perTryTimeout time.Duration // timeout applied to each individual attempt
}
// NewRetryRoundTripper creates a new instance of the RetryRoundTripper with the specified configuration.
func NewRetryRoundTripper(
	next http.RoundTripper,
	maxRetries int,
	initialDelay, maxDelay, perTryTimeout time.Duration,
) *RetryRoundTripper {
	rrt := &RetryRoundTripper{next: next}
	rrt.maxRetries = maxRetries
	rrt.initialDelay = initialDelay
	rrt.maxDelay = maxDelay
	rrt.perTryTimeout = perTryTimeout
	return rrt
}
// RoundTrip executes the request once and, when the outcome is retryable,
// delegates to the retry loop; otherwise the first result is returned as-is.
func (rrt *RetryRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	resp, err := rrt.executeRequest(req)
	if rrt.shouldRetry(resp, err) {
		return rrt.retryLoop(req, resp, err)
	}
	if err != nil {
		return resp, fmt.Errorf("initial request failed, not retrying: %w", err)
	}
	return resp, nil
}
// executeRequest performs a single HTTP request bounded by the per-try
// timeout; timeout errors are distinguished from other transport failures.
func (rrt *RetryRoundTripper) executeRequest(req *http.Request) (*http.Response, error) {
	ctx, cancel := context.WithTimeout(req.Context(), rrt.perTryTimeout)
	defer cancel()
	resp, err := rrt.next.RoundTrip(req.WithContext(ctx))
	if err == nil {
		return resp, nil
	}
	if errors.Is(err, context.DeadlineExceeded) {
		return resp, fmt.Errorf("per-try timeout of %v exceeded: %w", rrt.perTryTimeout, err)
	}
	return resp, fmt.Errorf("http roundtrip failed: %w", err)
}
// retryLoop handles the retry logic for a failed request.
// It waits with exponential backoff plus jitter before each attempt, returns
// as soon as an attempt produces a non-retryable outcome, and falls through
// to handleFinalError once maxRetries attempts are exhausted.
//
// NOTE(review): the same *http.Request is re-sent on every retry; a request
// whose Body has already been consumed may not be safe to replay — confirm
// callers only retry bodyless/replayable requests.
func (rrt *RetryRoundTripper) retryLoop(
req *http.Request,
initialResp *http.Response,
initialErr error,
) (*http.Response, error) {
var (
lastErr = initialErr
resp = initialResp
currentDelay = rrt.initialDelay
)
ctx := req.Context()
for attempt := 1; attempt <= rrt.maxRetries; attempt++ {
rrt.logRetryAttempt(ctx, attempt, currentDelay, lastErr)
// Sleep before retrying; jitter avoids synchronized retry storms.
waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay)
if err := rrt.waitForDelay(ctx, waitDuration); err != nil {
return nil, err // Context was canceled during wait.
}
// Exponential backoff for the next potential retry.
currentDelay = rrt.updateCurrentDelay(currentDelay)
// Retry attempt.
resp, lastErr = rrt.executeRequest(req)
if !rrt.shouldRetry(resp, lastErr) {
// Terminal outcome: either a definitive failure or a usable response.
if lastErr != nil {
return resp, fmt.Errorf("request failed on retry attempt %d: %w", attempt, lastErr)
}
return resp, nil
}
}
// All attempts exhausted: close any leftover body and wrap the last error.
return nil, rrt.handleFinalError(ctx, resp, lastErr)
}
// logRetryAttempt logs the details of a retry attempt.
func (rrt *RetryRoundTripper) logRetryAttempt(
	ctx context.Context,
	attempt int,
	delay time.Duration,
	err error,
) {
	fields := map[string]interface{}{
		"attempt":      attempt,
		"max_attempts": rrt.maxRetries,
		"delay":        delay,
		"error":        err,
	}
	tflog.Info(ctx, "Request failed, retrying...", fields)
}
// updateCurrentDelay calculates the next backoff delay, doubling the current
// one and clamping it to the configured maximum.
func (rrt *RetryRoundTripper) updateCurrentDelay(currentDelay time.Duration) time.Duration {
	next := currentDelay * backoffMultiplier
	if next > rrt.maxDelay {
		next = rrt.maxDelay
	}
	return next
}
// handleFinalError constructs and returns the final error after all retries have been exhausted.
// It also closes the last response body (if any) so the connection can be
// reused, logging — but not propagating — any close failure.
func (rrt *RetryRoundTripper) handleFinalError(
ctx context.Context,
resp *http.Response,
lastErr error,
) error {
if resp != nil {
// Best-effort cleanup: a close failure is only logged.
if err := resp.Body.Close(); err != nil {
tflog.Warn(
ctx, "Failed to close response body", map[string]interface{}{
"error": err.Error(),
},
)
}
}
// Prefer the transport error when one exists; it carries the root cause.
if lastErr != nil {
return fmt.Errorf("%w: %w", ErrRequestFailedAfterRetries, lastErr)
}
// This case occurs if shouldRetry was true due to a retryable status code,
// but all retries failed with similar status codes.
if resp != nil {
return fmt.Errorf(
"%w: last retry attempt failed with status code %d",
ErrRequestFailedAfterRetries,
resp.StatusCode,
)
}
// Neither an error nor a response: report the exhaustion alone.
return fmt.Errorf("%w: no response received", ErrRequestFailedAfterRetries)
}
// shouldRetry reports whether another attempt is warranted. Any transport
// error is considered retryable, as are the gateway-related status codes
// 502, 503 and 504; every other outcome (including other 5xx codes) is
// treated as final.
func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool {
	if err != nil {
		return true
	}
	if resp == nil {
		return false
	}
	switch resp.StatusCode {
	case http.StatusBadGateway, http.StatusServiceUnavailable, http.StatusGatewayTimeout:
		return true
	default:
		return false
	}
}
// calculateWaitDurationWithJitter derives the actual sleep duration for a
// retry from baseDelay by adding a cryptographically random jitter in
// [0, baseDelay/jitterFactor), spreading out concurrent retries to avoid
// thundering-herd behavior. Non-positive base delays yield 0; if jitter
// cannot be generated, the base delay is returned unchanged.
func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter(
	ctx context.Context,
	baseDelay time.Duration,
) time.Duration {
	if baseDelay <= 0 {
		return 0
	}
	jitterBound := int64(baseDelay / jitterFactor)
	if jitterBound <= 0 {
		// Base delay too small to jitter meaningfully.
		return baseDelay
	}
	n, err := rand.Int(rand.Reader, big.NewInt(jitterBound))
	if err != nil {
		// A failing entropy source must not break the retry loop; fall
		// back to the plain backoff delay.
		tflog.Warn(
			ctx, "Failed to generate random jitter, proceeding without it.", map[string]interface{}{
				"error": err.Error(),
			},
		)
		return baseDelay
	}
	return baseDelay + time.Duration(n.Int64())
}
// waitForDelay blocks for the given delay, returning early with a wrapped
// context error if ctx is canceled first.
//
// A time.Timer with a deferred Stop is used instead of time.After: a timer
// created by time.After is not released until it fires, so on cancellation
// the old code kept the timer (and its channel) alive for up to the full
// delay. Stopping the timer releases it immediately.
func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error {
	timer := time.NewTimer(delay)
	defer timer.Stop()
	select {
	case <-ctx.Done():
		return fmt.Errorf("context canceled during backoff wait: %w", ctx.Err())
	case <-timer.C:
		return nil
	}
}

View file

@ -1,252 +0,0 @@
package core
import (
"context"
"errors"
"io"
"net/http"
"net/http/httptest"
"strings"
"sync/atomic"
"testing"
"time"
)
// mockRoundTripper is a test double for http.RoundTripper that records how
// many times RoundTrip is invoked and delegates the actual behavior to a
// caller-supplied function.
type mockRoundTripper struct {
	// roundTripFunc supplies the canned response/error for each call.
	roundTripFunc func(req *http.Request) (*http.Response, error)
	// callCount tracks invocations; accessed atomically because subtests
	// in this file run in parallel.
	callCount int32
}

// RoundTrip atomically increments the call counter and forwards the request
// to roundTripFunc.
func (m *mockRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	atomic.AddInt32(&m.callCount, 1)
	return m.roundTripFunc(req)
}

// CallCount returns the number of RoundTrip invocations observed so far.
func (m *mockRoundTripper) CallCount() int32 {
	return atomic.LoadInt32(&m.callCount)
}
// TestRetryRoundTripper_RoundTrip exercises the retry transport end to end
// against a mockRoundTripper: pass-through on success and on non-retryable
// statuses, retry exhaustion on persistent 503s and network errors,
// recovery after a transient failure, and early abort when the request
// context is canceled.
func TestRetryRoundTripper_RoundTrip(t *testing.T) {
	t.Parallel()
	// testRetryConfig builds a tripper with 3 retries and millisecond-scale
	// delays so the suite stays fast. The exact meaning of the three
	// durations is defined by NewRetryRoundTripper (not visible here);
	// presumably initial/max/overall delays — confirm against its signature.
	testRetryConfig := func(next http.RoundTripper) *RetryRoundTripper {
		return NewRetryRoundTripper(
			next,
			3,
			1*time.Millisecond,
			10*time.Millisecond,
			50*time.Millisecond,
		)
	}
	// Table of cases that must complete in exactly one call (no retries).
	noRetryTests := []struct {
		name               string
		mockStatusCode     int
		expectedStatusCode int
	}{
		{
			name:               "should succeed on the first try",
			mockStatusCode:     http.StatusOK,
			expectedStatusCode: http.StatusOK,
		},
		{
			name:               "should not retry on a non-retryable status code like 400",
			mockStatusCode:     http.StatusBadRequest,
			expectedStatusCode: http.StatusBadRequest,
		},
	}
	for _, testCase := range noRetryTests {
		t.Run(
			testCase.name, func(t *testing.T) {
				t.Parallel()
				mock := &mockRoundTripper{
					roundTripFunc: func(req *http.Request) (*http.Response, error) {
						// NopCloser(nil) gives a closable body; it is never
						// read in these tests, only closed.
						return &http.Response{
							StatusCode: testCase.mockStatusCode,
							Body:       io.NopCloser(nil),
							Request:    req,
						}, nil
					},
				}
				tripper := testRetryConfig(mock)
				req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
				resp, err := tripper.RoundTrip(req)
				if resp != nil {
					defer func() {
						if closeErr := resp.Body.Close(); closeErr != nil {
							t.Errorf("failed to close response body: %v", closeErr)
						}
					}()
				}
				if err != nil {
					t.Fatalf("expected no error, got %v", err)
				}
				if resp.StatusCode != testCase.expectedStatusCode {
					t.Fatalf("expected status code %d, got %d", testCase.expectedStatusCode, resp.StatusCode)
				}
				// Exactly one call proves no retry was attempted.
				if mock.CallCount() != 1 {
					t.Fatalf("expected 1 call, got %d", mock.CallCount())
				}
			},
		)
	}
	t.Run(
		"should retry on retryable status code (503) and eventually fail", func(t *testing.T) {
			t.Parallel()
			// The mock always answers 503, so every retry is consumed.
			mock := &mockRoundTripper{
				roundTripFunc: func(req *http.Request) (*http.Response, error) {
					return &http.Response{
						StatusCode: http.StatusServiceUnavailable,
						Body:       io.NopCloser(nil),
						Request:    req,
					}, nil
				},
			}
			tripper := testRetryConfig(mock)
			req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
			resp, err := tripper.RoundTrip(req)
			if resp != nil {
				defer func() {
					if closeErr := resp.Body.Close(); closeErr != nil {
						t.Errorf("failed to close response body: %v", closeErr)
					}
				}()
			}
			if err == nil {
				t.Fatal("expected an error, but got nil")
			}
			// The final error must surface the last status code seen.
			expectedErrorMsg := "last retry attempt failed with status code 503"
			if !strings.Contains(err.Error(), expectedErrorMsg) {
				t.Fatalf("expected error to contain %q, got %q", expectedErrorMsg, err.Error())
			}
			if mock.CallCount() != 4 { // 1 initial + 3 retries
				t.Fatalf("expected 4 calls, got %d", mock.CallCount())
			}
		},
	)
	t.Run(
		"should succeed after one retry", func(t *testing.T) {
			t.Parallel()
			// First call fails with 503, second succeeds — verifies that a
			// transient failure is recovered without exhausting retries.
			mock := &mockRoundTripper{}
			mock.roundTripFunc = func(req *http.Request) (*http.Response, error) {
				if mock.CallCount() < 2 {
					return &http.Response{
						StatusCode: http.StatusServiceUnavailable,
						Body:       io.NopCloser(nil),
						Request:    req,
					}, nil
				}
				return &http.Response{
					StatusCode: http.StatusOK,
					Body:       io.NopCloser(nil),
					Request:    req,
				}, nil
			}
			tripper := testRetryConfig(mock)
			req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
			resp, err := tripper.RoundTrip(req)
			if resp != nil {
				defer func() {
					if closeErr := resp.Body.Close(); closeErr != nil {
						t.Errorf("failed to close response body: %v", closeErr)
					}
				}()
			}
			if err != nil {
				t.Fatalf("expected no error, got %v", err)
			}
			if resp.StatusCode != http.StatusOK {
				t.Fatalf("expected status code %d, got %d", http.StatusOK, resp.StatusCode)
			}
			if mock.CallCount() != 2 {
				t.Fatalf("expected 2 calls, got %d", mock.CallCount())
			}
		},
	)
	t.Run(
		"should retry on network error", func(t *testing.T) {
			t.Parallel()
			// Transport-level errors (not HTTP statuses) must also be retried,
			// and the original error must remain reachable via errors.Is.
			mockErr := errors.New("simulated network error")
			mock := &mockRoundTripper{
				roundTripFunc: func(_ *http.Request) (*http.Response, error) {
					return nil, mockErr
				},
			}
			tripper := testRetryConfig(mock)
			req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
			resp, err := tripper.RoundTrip(req)
			if resp != nil {
				defer func() {
					if closeErr := resp.Body.Close(); closeErr != nil {
						t.Errorf("failed to close response body: %v", closeErr)
					}
				}()
			}
			if !errors.Is(err, mockErr) {
				t.Fatalf("expected error to be %v, got %v", mockErr, err)
			}
			if mock.CallCount() != 4 { // 1 initial + 3 retries
				t.Fatalf("expected 4 calls, got %d", mock.CallCount())
			}
		},
	)
	t.Run(
		"should abort retries if the main context is canceled", func(t *testing.T) {
			t.Parallel()
			// The mock blocks until either 100ms elapse or the request
			// context is done; the 20ms timeout below guarantees the context
			// wins, so no retry should ever be attempted.
			mock := &mockRoundTripper{
				roundTripFunc: func(req *http.Request) (*http.Response, error) {
					select {
					case <-time.After(100 * time.Millisecond):
						return nil, errors.New("this should not be returned")
					case <-req.Context().Done():
						return nil, req.Context().Err()
					}
				},
			}
			tripper := testRetryConfig(mock)
			baseCtx := context.Background()
			ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond)
			defer cancel()
			req := httptest.NewRequest(http.MethodGet, "/", http.NoBody).WithContext(ctx)
			resp, err := tripper.RoundTrip(req)
			if resp != nil {
				defer func() {
					if closeErr := resp.Body.Close(); closeErr != nil {
						t.Errorf("failed to close response body: %v", closeErr)
					}
				}()
			}
			if !errors.Is(err, context.DeadlineExceeded) {
				t.Fatalf("expected error to be context.DeadlineExceeded, got %v", err)
			}
			// A single call proves cancellation stopped the retry loop.
			if mock.CallCount() != 1 {
				t.Fatalf("expected 1 call, got %d", mock.CallCount())
			}
		},
	)
}

View file

@ -9,7 +9,6 @@ import (
"strings" "strings"
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
) )

View file

@ -7,7 +7,6 @@ import (
"testing" "testing"
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
) )

View file

@ -10,7 +10,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
) )

View file

@ -7,7 +7,6 @@ import (
"testing" "testing"
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
) )

View file

@ -10,14 +10,13 @@ import (
"github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
pgDsGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
// Ensure the implementation satisfies the expected interfaces. // Ensure the implementation satisfies the expected interfaces.
@ -32,13 +31,13 @@ func NewDatabaseDataSource() datasource.DataSource {
// dataSourceModel maps the data source schema data. // dataSourceModel maps the data source schema data.
type dataSourceModel struct { type dataSourceModel struct {
pgDsGen.DatabaseModel postgresflexalpha2.DatabaseModel
TerraformID types.String `tfsdk:"id"` TerraformID types.String `tfsdk:"id"`
} }
// databaseDataSource is the data source implementation. // databaseDataSource is the data source implementation.
type databaseDataSource struct { type databaseDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -73,7 +72,8 @@ func (r *databaseDataSource) Configure(
// Schema defines the schema for the data source. // Schema defines the schema for the data source.
func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
s := pgDsGen.DatabaseDataSourceSchema(ctx)
s := postgresflexalpha2.DatabaseDataSourceSchema(ctx)
s.Attributes["id"] = schema.StringAttribute{ s.Attributes["id"] = schema.StringAttribute{
Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," + Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
"`database_id`\\\".\",", "`database_id`\\\".\",",
@ -144,7 +144,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
model *dataSourceModel, model *dataSourceModel,
projectId, region, instanceId string, projectId, region, instanceId string,
diags *diag.Diagnostics, diags *diag.Diagnostics,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflexalpha.ListDatabase, error) {
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown() isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
@ -159,12 +159,12 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
if isIdSet { if isIdSet {
databaseId := model.DatabaseId.ValueInt64() databaseId := model.DatabaseId.ValueInt64()
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId) return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
} }
databaseName := model.Name.ValueString() databaseName := model.Name.ValueString()
ctx = tflog.SetField(ctx, "name", databaseName) ctx = tflog.SetField(ctx, "name", databaseName)
return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName) return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
} }
// handleReadError centralizes API error handling for the Read operation. // handleReadError centralizes API error handling for the Read operation.

View file

@ -5,7 +5,7 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient. // databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
@ -15,7 +15,7 @@ type databaseClientReader interface {
projectId string, projectId string,
region string, region string,
instanceId string, instanceId string,
) v3alpha1api.ApiListDatabasesRequestRequest ) postgresflex.ApiListDatabasesRequestRequest
} }
// getDatabaseById gets a database by its ID. // getDatabaseById gets a database by its ID.
@ -24,9 +24,9 @@ func getDatabaseById(
client databaseClientReader, client databaseClientReader,
projectId, region, instanceId string, projectId, region, instanceId string,
databaseId int64, databaseId int64,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflex.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool { filter := func(db postgresflex.ListDatabase) bool {
return int64(db.Id) == databaseId return db.Id != nil && *db.Id == databaseId
} }
return getDatabase(ctx, client, projectId, region, instanceId, filter) return getDatabase(ctx, client, projectId, region, instanceId, filter)
} }
@ -36,9 +36,9 @@ func getDatabaseByName(
ctx context.Context, ctx context.Context,
client databaseClientReader, client databaseClientReader,
projectId, region, instanceId, databaseName string, projectId, region, instanceId, databaseName string,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflex.ListDatabase, error) {
filter := func(db v3alpha1api.ListDatabase) bool { filter := func(db postgresflex.ListDatabase) bool {
return db.Name == databaseName return db.Name != nil && *db.Name == databaseName
} }
return getDatabase(ctx, client, projectId, region, instanceId, filter) return getDatabase(ctx, client, projectId, region, instanceId, filter)
} }
@ -49,8 +49,8 @@ func getDatabase(
ctx context.Context, ctx context.Context,
client databaseClientReader, client databaseClientReader,
projectId, region, instanceId string, projectId, region, instanceId string,
filter func(db v3alpha1api.ListDatabase) bool, filter func(db postgresflex.ListDatabase) bool,
) (*v3alpha1api.ListDatabase, error) { ) (*postgresflex.ListDatabase, error) {
if projectId == "" || region == "" || instanceId == "" { if projectId == "" || region == "" || instanceId == "" {
return nil, fmt.Errorf("all parameters (project, region, instance) are required") return nil, fmt.Errorf("all parameters (project, region, instance) are required")
} }
@ -59,18 +59,18 @@ func getDatabase(
for page := int32(1); ; page++ { for page := int32(1); ; page++ {
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId). res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute() Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute()
if err != nil { if err != nil {
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err) return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
} }
// If the API returns no databases, we have reached the end of the list. // If the API returns no databases, we have reached the end of the list.
if len(res.Databases) == 0 { if res.Databases == nil || len(*res.Databases) == 0 {
break break
} }
// Iterate over databases to find a match // Iterate over databases to find a match
for _, db := range res.Databases { for _, db := range *res.Databases {
if filter(db) { if filter(db) {
foundDb := db foundDb := db
return &foundDb, nil return &foundDb, nil
@ -82,6 +82,10 @@ func getDatabase(
} }
// cleanString removes leading and trailing quotes which are sometimes returned by the API. // cleanString removes leading and trailing quotes which are sometimes returned by the API.
func cleanString(s string) string { func cleanString(s *string) *string {
return strings.Trim(s, "\"") if s == nil {
return nil
}
res := strings.Trim(*s, "\"")
return &res
} }

View file

@ -5,99 +5,126 @@ import (
"testing" "testing"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
type mockRequest struct {
executeFunc func() (*postgresflex.ListDatabasesResponse, error)
}
func (m *mockRequest) Page(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Size(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest {
return m
}
func (m *mockRequest) Execute() (*postgresflex.ListDatabasesResponse, error) {
return m.executeFunc()
}
type mockDBClient struct {
executeRequest func() postgresflex.ApiListDatabasesRequestRequest
}
var _ databaseClientReader = (*mockDBClient)(nil)
func (m *mockDBClient) ListDatabasesRequest(
_ context.Context,
_, _, _ string,
) postgresflex.ApiListDatabasesRequestRequest {
return m.executeRequest()
}
func TestGetDatabase(t *testing.T) { func TestGetDatabase(t *testing.T) {
mockResp := func(page int32) (*v3alpha1api.ListDatabasesResponse, error) { mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
if page == 1 { if page == 1 {
return &v3alpha1api.ListDatabasesResponse{ return &postgresflex.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{ Databases: &[]postgresflex.ListDatabase{
{Id: int32(1), Name: "first"}, {Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
{Id: int32(2), Name: "second"}, {Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
}, },
Pagination: v3alpha1api.Pagination{ Pagination: &postgresflex.Pagination{
Page: int32(1), Page: utils.Ptr(int64(1)),
TotalPages: int32(2), TotalPages: utils.Ptr(int64(2)),
Size: int32(3), Size: utils.Ptr(int64(3)),
}, },
}, nil }, nil
} }
if page == 2 { if page == 2 {
return &v3alpha1api.ListDatabasesResponse{ return &postgresflex.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{{Id: int32(3), Name: "three"}}, Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
Pagination: v3alpha1api.Pagination{ Pagination: &postgresflex.Pagination{
Page: int32(2), Page: utils.Ptr(int64(2)),
TotalPages: int32(2), TotalPages: utils.Ptr(int64(2)),
Size: int32(3), Size: utils.Ptr(int64(3)),
}, },
}, nil }, nil
} }
return &v3alpha1api.ListDatabasesResponse{ return &postgresflex.ListDatabasesResponse{
Databases: []v3alpha1api.ListDatabase{}, Databases: &[]postgresflex.ListDatabase{},
Pagination: v3alpha1api.Pagination{ Pagination: &postgresflex.Pagination{
Page: int32(3), Page: utils.Ptr(int64(3)),
TotalPages: int32(2), TotalPages: utils.Ptr(int64(2)),
Size: int32(3), Size: utils.Ptr(int64(3)),
}, },
}, nil }, nil
} }
tests := []struct { tests := []struct {
description string description string
projectID string projectId string
region string region string
instanceID string instanceId string
wantErr bool wantErr bool
wantDbName string wantDbName string
wantDbID int32 wantDbId int64
}{ }{
{ {
description: "Success - Found by name on first page", description: "Success - Found by name on first page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbName: "second", wantDbName: "second",
}, },
{ {
description: "Success - Found by id on first page", description: "Success - Found by id on first page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbID: 2, wantDbId: 2,
}, },
{ {
description: "Success - Found by name on second page", description: "Success - Found by name on second page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbName: "three", wantDbName: "three",
}, },
{ {
description: "Success - Found by id on second page", description: "Success - Found by id on second page",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: false, wantErr: false,
wantDbID: 1, wantDbId: 1,
}, },
{ {
description: "Error - API failure", description: "Error - API failure",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantErr: true, wantErr: true,
}, },
{ {
description: "Error - Missing parameters", description: "Error - Missing parameters",
projectID: "", region: "reg", instanceID: "inst", projectId: "", region: "reg", instanceId: "inst",
wantErr: true, wantErr: true,
}, },
{ {
description: "Error - Search by name not found after all pages", description: "Error - Search by name not found after all pages",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantDbName: "non-existent", wantDbName: "non-existent",
wantErr: true, wantErr: true,
}, },
{ {
description: "Error - Search by id not found after all pages", description: "Error - Search by id not found after all pages",
projectID: "pid", region: "reg", instanceID: "inst", projectId: "pid", region: "reg", instanceId: "inst",
wantDbID: 999999, wantDbId: 999999,
wantErr: true, wantErr: true,
}, },
} }
@ -105,46 +132,47 @@ func TestGetDatabase(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run( t.Run(
tt.description, func(t *testing.T) { tt.description, func(t *testing.T) {
var currentPage int32 var currentPage int64
client := &mockDBClient{
mockCall := func(_ v3alpha1api.ApiListDatabasesRequestRequest) (*v3alpha1api.ListDatabasesResponse, error) { executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
return &mockRequest{
executeFunc: func() (*postgresflex.ListDatabasesResponse, error) {
currentPage++ currentPage++
return mockResp(currentPage) return mockResp(currentPage)
},
}
},
} }
client := &v3alpha1api.DefaultAPIServiceMock{ var actual *postgresflex.ListDatabase
ListDatabasesRequestExecuteMock: &mockCall,
}
var actual *v3alpha1api.ListDatabase
var errDB error var errDB error
if tt.wantDbName != "" { if tt.wantDbName != "" {
actual, errDB = getDatabaseByName( actual, errDB = getDatabaseByName(
t.Context(), t.Context(),
client, client,
tt.projectID, tt.projectId,
tt.region, tt.region,
tt.instanceID, tt.instanceId,
tt.wantDbName, tt.wantDbName,
) )
} else if tt.wantDbID != 0 { } else if tt.wantDbId != 0 {
actual, errDB = getDatabaseById( actual, errDB = getDatabaseById(
t.Context(), t.Context(),
client, client,
tt.projectID, tt.projectId,
tt.region, tt.region,
tt.instanceID, tt.instanceId,
int64(tt.wantDbID), tt.wantDbId,
) )
} else { } else {
actual, errDB = getDatabase( actual, errDB = getDatabase(
context.Background(), context.Background(),
client, client,
tt.projectID, tt.projectId,
tt.region, tt.region,
tt.instanceID, tt.instanceId,
func(_ v3alpha1api.ListDatabase) bool { return false }, func(_ postgresflex.ListDatabase) bool { return false },
) )
} }
@ -153,14 +181,14 @@ func TestGetDatabase(t *testing.T) {
return return
} }
if !tt.wantErr && tt.wantDbName != "" && actual != nil { if !tt.wantErr && tt.wantDbName != "" && actual != nil {
if actual.Name != tt.wantDbName { if *actual.Name != tt.wantDbName {
t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", actual.Name, tt.wantDbName) t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", *actual.Name, tt.wantDbName)
} }
} }
if !tt.wantErr && tt.wantDbID != 0 && actual != nil { if !tt.wantErr && tt.wantDbId != 0 && actual != nil {
if actual.Id != tt.wantDbID { if *actual.Id != tt.wantDbId {
t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", actual.Id, tt.wantDbID) t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", *actual.Id, tt.wantDbId)
} }
} }
}, },
@ -171,18 +199,23 @@ func TestGetDatabase(t *testing.T) {
func TestCleanString(t *testing.T) { func TestCleanString(t *testing.T) {
testcases := []struct { testcases := []struct {
name string name string
given string given *string
expected string expected *string
}{ }{
{ {
name: "should remove quotes", name: "should remove quotes",
given: "\"quoted\"", given: utils.Ptr("\"quoted\""),
expected: "quoted", expected: utils.Ptr("quoted"),
},
{
name: "should handle nil",
given: nil,
expected: nil,
}, },
{ {
name: "should not change unquoted string", name: "should not change unquoted string",
given: "unquoted", given: utils.Ptr("unquoted"),
expected: "unquoted", expected: utils.Ptr("unquoted"),
}, },
} }

View file

@ -5,21 +5,20 @@ import (
"strconv" "strconv"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
) )
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source. // mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
func mapFields( func mapFields(
source *v3alpha1api.ListDatabase, source *postgresflexalpha.ListDatabase,
model *dataSourceModel, model *dataSourceModel,
region string, region string,
) error { ) error {
if source == nil { if source == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
if source.Id == 0 { if source.Id == nil || *source.Id == 0 {
return fmt.Errorf("id not present") return fmt.Errorf("id not present")
} }
if model == nil { if model == nil {
@ -29,8 +28,8 @@ func mapFields(
var databaseId int64 var databaseId int64
if model.DatabaseId.ValueInt64() != 0 { if model.DatabaseId.ValueInt64() != 0 {
databaseId = model.DatabaseId.ValueInt64() databaseId = model.DatabaseId.ValueInt64()
} else if source.Id != 0 { } else if source.Id != nil {
databaseId = int64(source.Id) databaseId = *source.Id
} else { } else {
return fmt.Errorf("database id not present") return fmt.Errorf("database id not present")
} }
@ -38,7 +37,7 @@ func mapFields(
model.Id = types.Int64Value(databaseId) model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId) model.DatabaseId = types.Int64Value(databaseId)
model.Name = types.StringValue(source.GetName()) model.Name = types.StringValue(source.GetName())
model.Owner = types.StringValue(cleanString(source.Owner)) model.Owner = types.StringPointerValue(cleanString(source.Owner))
model.Region = types.StringValue(region) model.Region = types.StringValue(region)
model.ProjectId = types.StringValue(model.ProjectId.ValueString()) model.ProjectId = types.StringValue(model.ProjectId.ValueString())
model.InstanceId = types.StringValue(model.InstanceId.ValueString()) model.InstanceId = types.StringValue(model.InstanceId.ValueString())
@ -52,12 +51,12 @@ func mapFields(
return nil return nil
} }
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource. // mapResourceFields maps fields from a ListDatabase API response to a resourceModel for the resource.
func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error { func mapResourceFields(source *postgresflexalpha.ListDatabase, model *resourceModel) error {
if source == nil { if source == nil {
return fmt.Errorf("response is nil") return fmt.Errorf("response is nil")
} }
if source.Id == 0 { if source.Id == nil || *source.Id == 0 {
return fmt.Errorf("id not present") return fmt.Errorf("id not present")
} }
if model == nil { if model == nil {
@ -67,8 +66,8 @@ func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceM
var databaseId int64 var databaseId int64
if model.Id.ValueInt64() != 0 { if model.Id.ValueInt64() != 0 {
databaseId = model.Id.ValueInt64() databaseId = model.Id.ValueInt64()
} else if source.Id != 0 { } else if source.Id != nil {
databaseId = int64(source.Id) databaseId = *source.Id
} else { } else {
return fmt.Errorf("database id not present") return fmt.Errorf("database id not present")
} }
@ -76,18 +75,18 @@ func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceM
model.Id = types.Int64Value(databaseId) model.Id = types.Int64Value(databaseId)
model.DatabaseId = types.Int64Value(databaseId) model.DatabaseId = types.Int64Value(databaseId)
model.Name = types.StringValue(source.GetName()) model.Name = types.StringValue(source.GetName())
model.Owner = types.StringValue(cleanString(source.Owner)) model.Owner = types.StringPointerValue(cleanString(source.Owner))
return nil return nil
} }
// toCreatePayload converts the resource model to an API create payload. // toCreatePayload converts the resource model to an API create payload.
func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) { func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
if model == nil { if model == nil {
return nil, fmt.Errorf("nil model") return nil, fmt.Errorf("nil model")
} }
return &v3alpha1api.CreateDatabaseRequestPayload{ return &postgresflexalpha.CreateDatabaseRequestPayload{
Name: model.Name.ValueString(), Name: model.Name.ValueStringPointer(),
Owner: model.Owner.ValueStringPointer(), Owner: model.Owner.ValueStringPointer(),
}, nil }, nil
} }

View file

@ -6,9 +6,7 @@ import (
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/core/utils" "github.com/stackitcloud/stackit-sdk-go/core/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen" datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
) )
@ -32,9 +30,9 @@ func TestMapFields(t *testing.T) {
name: "should map fields correctly", name: "should map fields correctly",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{ source: &postgresflexalpha.ListDatabase{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: "my-owner", Owner: utils.Ptr("\"my-owner\""),
}, },
model: &dataSourceModel{ model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
@ -63,8 +61,8 @@ func TestMapFields(t *testing.T) {
name: "should preserve existing model ID", name: "should preserve existing model ID",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{ source: &postgresflexalpha.ListDatabase{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "my-db", Name: utils.Ptr("my-db"),
}, },
model: &dataSourceModel{ model: &dataSourceModel{
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
@ -80,8 +78,7 @@ func TestMapFields(t *testing.T) {
DatabaseModel: datasource.DatabaseModel{ DatabaseModel: datasource.DatabaseModel{
Id: types.Int64Value(1), Id: types.Int64Value(1),
Name: types.StringValue("my-db"), Name: types.StringValue("my-db"),
Owner: types.StringValue(""), Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
DatabaseId: types.Int64Value(1),
Region: types.StringValue("eu01"), Region: types.StringValue("eu01"),
InstanceId: types.StringValue("my-instance"), InstanceId: types.StringValue("my-instance"),
ProjectId: types.StringValue("my-project"), ProjectId: types.StringValue("my-project"),
@ -101,7 +98,7 @@ func TestMapFields(t *testing.T) {
{ {
name: "should fail on nil source ID", name: "should fail on nil source ID",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{Id: 0}, source: &postgresflexalpha.ListDatabase{Id: nil},
model: &dataSourceModel{}, model: &dataSourceModel{},
}, },
expected: expected{err: true}, expected: expected{err: true},
@ -109,7 +106,7 @@ func TestMapFields(t *testing.T) {
{ {
name: "should fail on nil model", name: "should fail on nil model",
given: given{ given: given{
source: &postgresflexalpha.ListDatabase{Id: int32(1)}, source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
model: nil, model: nil,
}, },
expected: expected{err: true}, expected: expected{err: true},
@ -135,7 +132,7 @@ func TestMapFields(t *testing.T) {
func TestMapResourceFields(t *testing.T) { func TestMapResourceFields(t *testing.T) {
type given struct { type given struct {
source *postgresflexalpha.GetDatabaseResponse source *postgresflexalpha.ListDatabase
model *resourceModel model *resourceModel
} }
type expected struct { type expected struct {
@ -151,10 +148,10 @@ func TestMapResourceFields(t *testing.T) {
{ {
name: "should map fields correctly", name: "should map fields correctly",
given: given{ given: given{
source: &postgresflexalpha.GetDatabaseResponse{ source: &postgresflexalpha.ListDatabase{
Id: int32(1), Id: utils.Ptr(int64(1)),
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: "my-owner", Owner: utils.Ptr("\"my-owner\""),
}, },
model: &resourceModel{}, model: &resourceModel{},
}, },
@ -218,7 +215,7 @@ func TestToCreatePayload(t *testing.T) {
}, },
expected: expected{ expected: expected{
payload: &postgresflexalpha.CreateDatabaseRequestPayload{ payload: &postgresflexalpha.CreateDatabaseRequestPayload{
Name: "my-db", Name: utils.Ptr("my-db"),
Owner: utils.Ptr("my-owner"), Owner: utils.Ptr("my-owner"),
}, },
}, },

View file

@ -3,25 +3,25 @@ package postgresflexalpha
import ( import (
"context" "context"
_ "embed" _ "embed"
"errors"
"fmt" "fmt"
"math" "math"
"net/http"
"strconv" "strconv"
"strings" "strings"
"time"
"github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema" "github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "github.com/stackitcloud/stackit-sdk-go/core/oapierror"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
) )
var ( var (
@ -32,6 +32,9 @@ var (
_ resource.ResourceWithModifyPlan = &databaseResource{} _ resource.ResourceWithModifyPlan = &databaseResource{}
_ resource.ResourceWithIdentity = &databaseResource{} _ resource.ResourceWithIdentity = &databaseResource{}
// Define errors
errDatabaseNotFound = errors.New("database not found")
// Error message constants // Error message constants
extractErrorSummary = "extracting failed" extractErrorSummary = "extracting failed"
extractErrorMessage = "Extracting identity data: %v" extractErrorMessage = "Extracting identity data: %v"
@ -43,7 +46,7 @@ func NewDatabaseResource() resource.Resource {
} }
// resourceModel describes the resource data model. // resourceModel describes the resource data model.
type resourceModel = postgresflexalphaResGen.DatabaseModel type resourceModel = postgresflexalpha2.DatabaseModel
// DatabaseResourceIdentityModel describes the resource's identity attributes. // DatabaseResourceIdentityModel describes the resource's identity attributes.
type DatabaseResourceIdentityModel struct { type DatabaseResourceIdentityModel struct {
@ -55,7 +58,7 @@ type DatabaseResourceIdentityModel struct {
// databaseResource is the resource implementation. // databaseResource is the resource implementation.
type databaseResource struct { type databaseResource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -122,7 +125,7 @@ var modifiersFileByte []byte
// Schema defines the schema for the resource. // Schema defines the schema for the resource.
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
s := postgresflexalphaResGen.DatabaseResourceSchema(ctx) s := postgresflexalpha2.DatabaseResourceSchema(ctx)
fields, err := utils.ReadModifiersConfig(modifiersFileByte) fields, err := utils.ReadModifiersConfig(modifiersFileByte)
if err != nil { if err != nil {
@ -168,7 +171,6 @@ func (r *databaseResource) Create(
req resource.CreateRequest, req resource.CreateRequest,
resp *resource.CreateResponse, resp *resource.CreateResponse,
) { // nolint:gocritic // function signature required by Terraform ) { // nolint:gocritic // function signature required by Terraform
const funcErrorSummary = "[database CREATE] error"
var model resourceModel var model resourceModel
diags := req.Plan.Get(ctx, &model) diags := req.Plan.Get(ctx, &model)
resp.Diagnostics.Append(diags...) resp.Diagnostics.Append(diags...)
@ -179,7 +181,7 @@ func (r *databaseResource) Create(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString() projectId := model.ProjectId.ValueString()
region := model.Region.ValueString() region := model.InstanceId.ValueString()
instanceId := model.InstanceId.ValueString() instanceId := model.InstanceId.ValueString()
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
@ -192,34 +194,33 @@ func (r *databaseResource) Create(
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
funcErrorSummary, "Error creating database",
fmt.Sprintf("Creating API payload: %v", err), fmt.Sprintf("Creating API payload: %v", err),
) )
return return
} }
// Create new database // Create new database
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest( databaseResp, err := r.client.CreateDatabaseRequest(
ctx, ctx,
projectId, projectId,
region, region,
instanceId, instanceId,
).CreateDatabaseRequestPayload(*payload).Execute() ).CreateDatabaseRequestPayload(*payload).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, funcErrorSummary, fmt.Sprintf("Calling API: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating database", fmt.Sprintf("Calling API: %v", err))
return return
} }
dbID, ok := databaseResp.GetIdOk() if databaseResp == nil || databaseResp.Id == nil {
if !ok {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
funcErrorSummary, "Error creating database",
"API didn't return database Id. A database might although have been created", "API didn't return database Id. A database might have been created",
) )
return return
} }
databaseId := int64(*dbID) databaseId := *databaseResp.Id
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
@ -235,15 +236,12 @@ func (r *databaseResource) Create(
return return
} }
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId). database, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
if err != nil { if err != nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
funcErrorSummary, "Error creating database",
fmt.Sprintf("Getting database details after creation: %v", err), fmt.Sprintf("Getting database details after creation: %v", err),
) )
return return
@ -255,8 +253,8 @@ func (r *databaseResource) Create(
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
&resp.Diagnostics, &resp.Diagnostics,
funcErrorSummary, "Error creating database",
fmt.Sprintf("map resource fields: %v", err), fmt.Sprintf("Processing API payload: %v", err),
) )
return return
} }
@ -282,29 +280,38 @@ func (r *databaseResource) Read(
return return
} }
// Read identity data
var identityData DatabaseResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString() projectId, instanceId, region, databaseId, errExt := r.extractIdentityData(model, identityData)
instanceId := model.InstanceId.ValueString() if errExt != nil {
region := model.Region.ValueString() core.LogAndAddError(
databaseId := model.DatabaseId.ValueInt64() ctx,
&resp.Diagnostics,
extractErrorSummary,
fmt.Sprintf(extractErrorMessage, errExt),
)
}
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId). databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
SetTimeout(15 * time.Minute).
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
if err != nil { if err != nil {
core.LogAndAddError( oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
ctx, if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
&resp.Diagnostics, resp.State.RemoveResource(ctx)
"Error creating database", return
fmt.Sprintf("Getting database details after creation: %v", err), }
) core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
return return
} }
@ -327,7 +334,7 @@ func (r *databaseResource) Read(
ProjectID: types.StringValue(projectId), ProjectID: types.StringValue(projectId),
Region: types.StringValue(region), Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId), InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(int64(databaseResp.GetId())), DatabaseID: types.Int64Value(databaseId),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
@ -356,12 +363,30 @@ func (r *databaseResource) Update(
return return
} }
// Read identity data
var identityData DatabaseResourceIdentityModel
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
if resp.Diagnostics.HasError() {
return
}
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectId := model.ProjectId.ValueString() projectId, instanceId, region, databaseId64, errExt := r.extractIdentityData(model, identityData)
instanceId := model.InstanceId.ValueString() if errExt != nil {
region := model.Region.ValueString() core.LogAndAddError(
databaseId := model.DatabaseId.ValueInt64() ctx,
&resp.Diagnostics,
extractErrorSummary,
fmt.Sprintf(extractErrorMessage, errExt),
)
}
if databaseId64 > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return
}
databaseId := int32(databaseId64)
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
@ -377,7 +402,7 @@ func (r *databaseResource) Update(
} }
modified := false modified := false
var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
if stateModel.Name != model.Name { if stateModel.Name != model.Name {
payload.Name = model.Name.ValueStringPointer() payload.Name = model.Name.ValueStringPointer()
modified = true modified = true
@ -393,18 +418,13 @@ func (r *databaseResource) Update(
return return
} }
if databaseId > math.MaxInt32 {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", "databaseID out of bounds for int32")
return
}
databaseID32 := int32(databaseId) //nolint:gosec // TODO
// Update existing database // Update existing database
err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest( err := r.client.UpdateDatabasePartiallyRequest(
ctx, ctx,
projectId, projectId,
region, region,
instanceId, instanceId,
databaseID32, databaseId,
).UpdateDatabasePartiallyRequestPayload(payload).Execute() ).UpdateDatabasePartiallyRequestPayload(payload).Execute()
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error()) core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
@ -413,12 +433,15 @@ func (r *databaseResource) Update(
ctx = core.LogResponse(ctx) ctx = core.LogResponse(ctx)
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId). // Map response body to schema
SetTimeout(15 * time.Minute). databaseResp, err := getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId64)
SetSleepBeforeWait(15 * time.Second).
WaitWithContext(ctx)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error()) oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
if (ok && oapiErr.StatusCode == http.StatusNotFound) || errors.Is(err, errDatabaseNotFound) {
resp.State.RemoveResource(ctx)
return
}
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading database", fmt.Sprintf("Calling API: %v", err))
return return
} }
@ -441,7 +464,7 @@ func (r *databaseResource) Update(
ProjectID: types.StringValue(projectId), ProjectID: types.StringValue(projectId),
Region: types.StringValue(region), Region: types.StringValue(region),
InstanceID: types.StringValue(instanceId), InstanceID: types.StringValue(instanceId),
DatabaseID: types.Int64Value(databaseId), DatabaseID: types.Int64Value(databaseId64),
} }
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...) resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
if resp.Diagnostics.HasError() { if resp.Diagnostics.HasError() {
@ -478,7 +501,7 @@ func (r *databaseResource) Delete(
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData) projectId, instanceId, region, databaseId64, errExt := r.extractIdentityData(model, identityData)
if errExt != nil { if errExt != nil {
core.LogAndAddError( core.LogAndAddError(
ctx, ctx,
@ -492,14 +515,14 @@ func (r *databaseResource) Delete(
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)") core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
return return
} }
databaseId := int32(databaseId64) // nolint:gosec // check is performed above databaseId := int32(databaseId64)
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
ctx = tflog.SetField(ctx, "database_id", databaseId) ctx = tflog.SetField(ctx, "database_id", databaseId)
// Delete existing record set // Delete existing record set
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute() err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
} }
@ -516,9 +539,11 @@ func (r *databaseResource) ImportState(
req resource.ImportStateRequest, req resource.ImportStateRequest,
resp *resource.ImportStateResponse, resp *resource.ImportStateResponse,
) { ) {
ctx = core.InitProviderContext(ctx) ctx = core.InitProviderContext(ctx)
if req.ID != "" { if req.ID != "" {
idParts := strings.Split(req.ID, core.Separator) idParts := strings.Split(req.ID, core.Separator)
if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" { if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {

View file

@ -1,4 +1,4 @@
package postgresflexalphaflavor package postgresFlexAlphaFlavor
import ( import (
"context" "context"
@ -8,8 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen" postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
@ -17,7 +16,6 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
) )
@ -30,13 +28,13 @@ type FlavorModel struct {
ProjectId types.String `tfsdk:"project_id"` ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"` Region types.String `tfsdk:"region"`
StorageClass types.String `tfsdk:"storage_class"` StorageClass types.String `tfsdk:"storage_class"`
Cpu types.Int32 `tfsdk:"cpu"` Cpu types.Int64 `tfsdk:"cpu"`
Description types.String `tfsdk:"description"` Description types.String `tfsdk:"description"`
Id types.String `tfsdk:"id"` Id types.String `tfsdk:"id"`
FlavorId types.String `tfsdk:"flavor_id"` FlavorId types.String `tfsdk:"flavor_id"`
MaxGb types.Int32 `tfsdk:"max_gb"` MaxGb types.Int64 `tfsdk:"max_gb"`
Memory types.Int32 `tfsdk:"ram"` Memory types.Int64 `tfsdk:"ram"`
MinGb types.Int32 `tfsdk:"min_gb"` MinGb types.Int64 `tfsdk:"min_gb"`
NodeType types.String `tfsdk:"node_type"` NodeType types.String `tfsdk:"node_type"`
StorageClasses types.List `tfsdk:"storage_classes"` StorageClasses types.List `tfsdk:"storage_classes"`
} }
@ -48,7 +46,7 @@ func NewFlavorDataSource() datasource.DataSource {
// flavorDataSource is the data source implementation. // flavorDataSource is the data source implementation.
type flavorDataSource struct { type flavorDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -86,12 +84,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor description.", Description: "The flavor description.",
MarkdownDescription: "The flavor description.", MarkdownDescription: "The flavor description.",
}, },
"cpu": schema.Int32Attribute{ "cpu": schema.Int64Attribute{
Required: true, Required: true,
Description: "The cpu count of the instance.", Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.", MarkdownDescription: "The cpu count of the instance.",
}, },
"ram": schema.Int32Attribute{ "ram": schema.Int64Attribute{
Required: true, Required: true,
Description: "The memory of the instance in Gibibyte.", Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.", MarkdownDescription: "The memory of the instance in Gibibyte.",
@ -116,12 +114,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
Description: "The flavor id of the instance flavor.", Description: "The flavor id of the instance flavor.",
MarkdownDescription: "The flavor id of the instance flavor.", MarkdownDescription: "The flavor id of the instance flavor.",
}, },
"max_gb": schema.Int32Attribute{ "max_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.", Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
}, },
"min_gb": schema.Int32Attribute{ "min_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "minimum storage which is required to order in Gigabyte.", Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.", MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -138,10 +136,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
"class": schema.StringAttribute{ "class": schema.StringAttribute{
Computed: true, Computed: true,
}, },
"max_io_per_sec": schema.Int32Attribute{ "max_io_per_sec": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"max_through_in_mb": schema.Int32Attribute{ "max_through_in_mb": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
}, },
@ -171,25 +169,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region) flavors, err := getAllFlavors(ctx, r.client, projectId, region)
if err != nil { if err != nil {
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err)) core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
return return
} }
var foundFlavors []v3alpha1api.ListFlavors var foundFlavors []postgresflexalpha.ListFlavors
for _, flavor := range flavors { for _, flavor := range flavors {
if model.Cpu.ValueInt32() != flavor.Cpu { if model.Cpu.ValueInt64() != *flavor.Cpu {
continue continue
} }
if model.Memory.ValueInt32() != flavor.Memory { if model.Memory.ValueInt64() != *flavor.Memory {
continue continue
} }
if model.NodeType.ValueString() != flavor.NodeType { if model.NodeType.ValueString() != *flavor.NodeType {
continue continue
} }
for _, sc := range flavor.StorageClasses { for _, sc := range *flavor.StorageClasses {
if model.StorageClass.ValueString() != sc.Class { if model.StorageClass.ValueString() != *sc.Class {
continue continue
} }
foundFlavors = append(foundFlavors, flavor) foundFlavors = append(foundFlavors, flavor)
@ -205,11 +203,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
} }
f := foundFlavors[0] f := foundFlavors[0]
model.Description = types.StringValue(f.Description) model.Description = types.StringValue(*f.Description)
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id) model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
model.FlavorId = types.StringValue(f.Id) model.FlavorId = types.StringValue(*f.Id)
model.MaxGb = types.Int32Value(f.MaxGB) model.MaxGb = types.Int64Value(*f.MaxGB)
model.MinGb = types.Int32Value(f.MinGB) model.MinGb = types.Int64Value(*f.MinGB)
if f.StorageClasses == nil { if f.StorageClasses == nil {
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{ model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
@ -219,15 +217,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
}) })
} else { } else {
var scList []attr.Value var scList []attr.Value
for _, sc := range f.StorageClasses { for _, sc := range *f.StorageClasses {
scList = append( scList = append(
scList, scList,
postgresflexalphaGen.NewStorageClassesValueMust( postgresflexalphaGen.NewStorageClassesValueMust(
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx), postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
map[string]attr.Value{ map[string]attr.Value{
"class": types.StringValue(sc.Class), "class": types.StringValue(*sc.Class),
"max_io_per_sec": types.Int32Value(sc.MaxIoPerSec), "max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
"max_through_in_mb": types.Int32Value(sc.MaxThroughInMb), "max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
}, },
), ),
) )

View file

@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"flavors": schema.ListNestedAttribute{ "flavors": schema.ListNestedAttribute{
NestedObject: schema.NestedAttributeObject{ NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"cpu": schema.Int32Attribute{ "cpu": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "The cpu count of the instance.", Description: "The cpu count of the instance.",
MarkdownDescription: "The cpu count of the instance.", MarkdownDescription: "The cpu count of the instance.",
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "The id of the instance flavor.", Description: "The id of the instance flavor.",
MarkdownDescription: "The id of the instance flavor.", MarkdownDescription: "The id of the instance flavor.",
}, },
"max_gb": schema.Int32Attribute{ "max_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "maximum storage which can be ordered for the flavor in Gigabyte.", Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.", MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
}, },
"memory": schema.Int32Attribute{ "memory": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "The memory of the instance in Gibibyte.", Description: "The memory of the instance in Gibibyte.",
MarkdownDescription: "The memory of the instance in Gibibyte.", MarkdownDescription: "The memory of the instance in Gibibyte.",
}, },
"min_gb": schema.Int32Attribute{ "min_gb": schema.Int64Attribute{
Computed: true, Computed: true,
Description: "minimum storage which is required to order in Gigabyte.", Description: "minimum storage which is required to order in Gigabyte.",
MarkdownDescription: "minimum storage which is required to order in Gigabyte.", MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
"class": schema.StringAttribute{ "class": schema.StringAttribute{
Computed: true, Computed: true,
}, },
"max_io_per_sec": schema.Int32Attribute{ "max_io_per_sec": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"max_through_in_mb": schema.Int32Attribute{ "max_through_in_mb": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
}, },
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
Description: "List of flavors available for the project.", Description: "List of flavors available for the project.",
MarkdownDescription: "List of flavors available for the project.", MarkdownDescription: "List of flavors available for the project.",
}, },
"page": schema.Int32Attribute{ "page": schema.Int64Attribute{
Optional: true, Optional: true,
Computed: true, Computed: true,
Description: "Number of the page of items list to be returned.", Description: "Number of the page of items list to be returned.",
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
}, },
"pagination": schema.SingleNestedAttribute{ "pagination": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"page": schema.Int32Attribute{ "page": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"size": schema.Int32Attribute{ "size": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"sort": schema.StringAttribute{ "sort": schema.StringAttribute{
Computed: true, Computed: true,
}, },
"total_pages": schema.Int32Attribute{ "total_pages": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
"total_rows": schema.Int32Attribute{ "total_rows": schema.Int64Attribute{
Computed: true, Computed: true,
}, },
}, },
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
), ),
}, },
}, },
"size": schema.Int32Attribute{ "size": schema.Int64Attribute{
Optional: true, Optional: true,
Computed: true, Computed: true,
Description: "Number of items to be returned on each page.", Description: "Number of items to be returned on each page.",
@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
type FlavorsModel struct { type FlavorsModel struct {
Flavors types.List `tfsdk:"flavors"` Flavors types.List `tfsdk:"flavors"`
Page types.Int32 `tfsdk:"page"` Page types.Int64 `tfsdk:"page"`
Pagination PaginationValue `tfsdk:"pagination"` Pagination PaginationValue `tfsdk:"pagination"`
ProjectId types.String `tfsdk:"project_id"` ProjectId types.String `tfsdk:"project_id"`
Region types.String `tfsdk:"region"` Region types.String `tfsdk:"region"`
Size types.Int32 `tfsdk:"size"` Size types.Int64 `tfsdk:"size"`
Sort types.String `tfsdk:"sort"` Sort types.String `tfsdk:"sort"`
} }
@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
cpuVal, ok := cpuAttribute.(basetypes.Int32Value) cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute)) fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
} }
descriptionAttribute, ok := attributes["description"] descriptionAttribute, ok := attributes["description"]
@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value) maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute)) fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
} }
memoryAttribute, ok := attributes["memory"] memoryAttribute, ok := attributes["memory"]
@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
memoryVal, ok := memoryAttribute.(basetypes.Int32Value) memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute)) fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
} }
minGbAttribute, ok := attributes["min_gb"] minGbAttribute, ok := attributes["min_gb"]
@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
return nil, diags return nil, diags
} }
minGbVal, ok := minGbAttribute.(basetypes.Int32Value) minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute)) fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
} }
nodeTypeAttribute, ok := attributes["node_type"] nodeTypeAttribute, ok := attributes["node_type"]
@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
cpuVal, ok := cpuAttribute.(basetypes.Int32Value) cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute)) fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
} }
descriptionAttribute, ok := attributes["description"] descriptionAttribute, ok := attributes["description"]
@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value) maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute)) fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
} }
memoryAttribute, ok := attributes["memory"] memoryAttribute, ok := attributes["memory"]
@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
memoryVal, ok := memoryAttribute.(basetypes.Int32Value) memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute)) fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
} }
minGbAttribute, ok := attributes["min_gb"] minGbAttribute, ok := attributes["min_gb"]
@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
return NewFlavorsValueUnknown(), diags return NewFlavorsValueUnknown(), diags
} }
minGbVal, ok := minGbAttribute.(basetypes.Int32Value) minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute)) fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
} }
nodeTypeAttribute, ok := attributes["node_type"] nodeTypeAttribute, ok := attributes["node_type"]
@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = FlavorsValue{} var _ basetypes.ObjectValuable = FlavorsValue{}
type FlavorsValue struct { type FlavorsValue struct {
Cpu basetypes.Int32Value `tfsdk:"cpu"` Cpu basetypes.Int64Value `tfsdk:"cpu"`
Description basetypes.StringValue `tfsdk:"description"` Description basetypes.StringValue `tfsdk:"description"`
Id basetypes.StringValue `tfsdk:"id"` Id basetypes.StringValue `tfsdk:"id"`
MaxGb basetypes.Int32Value `tfsdk:"max_gb"` MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
Memory basetypes.Int32Value `tfsdk:"memory"` Memory basetypes.Int64Value `tfsdk:"memory"`
MinGb basetypes.Int32Value `tfsdk:"min_gb"` MinGb basetypes.Int64Value `tfsdk:"min_gb"`
NodeType basetypes.StringValue `tfsdk:"node_type"` NodeType basetypes.StringValue `tfsdk:"node_type"`
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"` StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
state attr.ValueState state attr.ValueState
@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
var val tftypes.Value var val tftypes.Value
var err error var err error
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["storage_classes"] = basetypes.ListType{ attrTypes["storage_classes"] = basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx), ElemType: StorageClassesValue{}.Type(ctx),
@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
} }
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
"cpu": basetypes.Int32Type{}, "cpu": basetypes.Int64Type{},
"description": basetypes.StringType{}, "description": basetypes.StringType{},
"id": basetypes.StringType{}, "id": basetypes.StringType{},
"max_gb": basetypes.Int32Type{}, "max_gb": basetypes.Int64Type{},
"memory": basetypes.Int32Type{}, "memory": basetypes.Int64Type{},
"min_gb": basetypes.Int32Type{}, "min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{}, "node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{ "storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx), ElemType: StorageClassesValue{}.Type(ctx),
@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"cpu": basetypes.Int32Type{}, "cpu": basetypes.Int64Type{},
"description": basetypes.StringType{}, "description": basetypes.StringType{},
"id": basetypes.StringType{}, "id": basetypes.StringType{},
"max_gb": basetypes.Int32Type{}, "max_gb": basetypes.Int64Type{},
"memory": basetypes.Int32Type{}, "memory": basetypes.Int64Type{},
"min_gb": basetypes.Int32Type{}, "min_gb": basetypes.Int64Type{},
"node_type": basetypes.StringType{}, "node_type": basetypes.StringType{},
"storage_classes": basetypes.ListType{ "storage_classes": basetypes.ListType{
ElemType: StorageClassesValue{}.Type(ctx), ElemType: StorageClassesValue{}.Type(ctx),
@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags return nil, diags
} }
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value) maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute)) fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
} }
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
return nil, diags return nil, diags
} }
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value) maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute)) fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags return NewStorageClassesValueUnknown(), diags
} }
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value) maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute)) fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
} }
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"] maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
return NewStorageClassesValueUnknown(), diags return NewStorageClassesValueUnknown(), diags
} }
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value) maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute)) fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
type StorageClassesValue struct { type StorageClassesValue struct {
Class basetypes.StringValue `tfsdk:"class"` Class basetypes.StringValue `tfsdk:"class"`
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"` MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"` MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
state attr.ValueState state attr.ValueState
} }
@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
var err error var err error
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes} objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
"class": basetypes.StringType{}, "class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int32Type{}, "max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int32Type{}, "max_through_in_mb": basetypes.Int64Type{},
} }
if v.IsNull() { if v.IsNull() {
@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"class": basetypes.StringType{}, "class": basetypes.StringType{},
"max_io_per_sec": basetypes.Int32Type{}, "max_io_per_sec": basetypes.Int64Type{},
"max_through_in_mb": basetypes.Int32Type{}, "max_through_in_mb": basetypes.Int64Type{},
} }
} }
@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
pageVal, ok := pageAttribute.(basetypes.Int32Value) pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute)) fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
} }
sizeAttribute, ok := attributes["size"] sizeAttribute, ok := attributes["size"]
@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
sizeVal, ok := sizeAttribute.(basetypes.Int32Value) sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute)) fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
} }
sortAttribute, ok := attributes["sort"] sortAttribute, ok := attributes["sort"]
@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value) totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute)) fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
} }
totalRowsAttribute, ok := attributes["total_rows"] totalRowsAttribute, ok := attributes["total_rows"]
@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
return nil, diags return nil, diags
} }
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value) totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute)) fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
pageVal, ok := pageAttribute.(basetypes.Int32Value) pageVal, ok := pageAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute)) fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
} }
sizeAttribute, ok := attributes["size"] sizeAttribute, ok := attributes["size"]
@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
sizeVal, ok := sizeAttribute.(basetypes.Int32Value) sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute)) fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
} }
sortAttribute, ok := attributes["sort"] sortAttribute, ok := attributes["sort"]
@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value) totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute)) fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
} }
totalRowsAttribute, ok := attributes["total_rows"] totalRowsAttribute, ok := attributes["total_rows"]
@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
return NewPaginationValueUnknown(), diags return NewPaginationValueUnknown(), diags
} }
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value) totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute)) fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = PaginationValue{} var _ basetypes.ObjectValuable = PaginationValue{}
type PaginationValue struct { type PaginationValue struct {
Page basetypes.Int32Value `tfsdk:"page"` Page basetypes.Int64Value `tfsdk:"page"`
Size basetypes.Int32Value `tfsdk:"size"` Size basetypes.Int64Value `tfsdk:"size"`
Sort basetypes.StringValue `tfsdk:"sort"` Sort basetypes.StringValue `tfsdk:"sort"`
TotalPages basetypes.Int32Value `tfsdk:"total_pages"` TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
TotalRows basetypes.Int32Value `tfsdk:"total_rows"` TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
state attr.ValueState state attr.ValueState
} }
@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
var val tftypes.Value var val tftypes.Value
var err error var err error
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx) attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx) attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes} objectType := tftypes.Object{AttributeTypes: attrTypes}
@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
var diags diag.Diagnostics var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
"page": basetypes.Int32Type{}, "page": basetypes.Int64Type{},
"size": basetypes.Int32Type{}, "size": basetypes.Int64Type{},
"sort": basetypes.StringType{}, "sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{}, "total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int32Type{}, "total_rows": basetypes.Int64Type{},
} }
if v.IsNull() { if v.IsNull() {
@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"page": basetypes.Int32Type{}, "page": basetypes.Int64Type{},
"size": basetypes.Int32Type{}, "size": basetypes.Int64Type{},
"sort": basetypes.StringType{}, "sort": basetypes.StringType{},
"total_pages": basetypes.Int32Type{}, "total_pages": basetypes.Int64Type{},
"total_rows": basetypes.Int32Type{}, "total_rows": basetypes.Int64Type{},
} }
} }

View file

@ -1,24 +1,24 @@
package postgresflexalphaflavor package postgresFlexAlphaFlavor
import ( import (
"context" "context"
"fmt" "fmt"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
type flavorsClientReader interface { type flavorsClientReader interface {
GetFlavorsRequest( GetFlavorsRequest(
ctx context.Context, ctx context.Context,
projectId, region string, projectId, region string,
) v3alpha1api.ApiGetFlavorsRequestRequest ) postgresflex.ApiGetFlavorsRequestRequest
} }
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) ( func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
[]v3alpha1api.ListFlavors, []postgresflex.ListFlavors,
error, error,
) { ) {
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true } getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter) flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
if err != nil { if err != nil {
return nil, err return nil, err
@ -32,29 +32,29 @@ func getFlavorsByFilter(
ctx context.Context, ctx context.Context,
client flavorsClientReader, client flavorsClientReader,
projectId, region string, projectId, region string,
filter func(db v3alpha1api.ListFlavors) bool, filter func(db postgresflex.ListFlavors) bool,
) ([]v3alpha1api.ListFlavors, error) { ) ([]postgresflex.ListFlavors, error) {
if projectId == "" || region == "" { if projectId == "" || region == "" {
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required") return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
} }
const pageSize = 25 const pageSize = 25
var result = make([]v3alpha1api.ListFlavors, 0) var result = make([]postgresflex.ListFlavors, 0)
for page := int32(1); ; page++ { for page := int32(1); ; page++ {
res, err := client.GetFlavorsRequest(ctx, projectId, region). res, err := client.GetFlavorsRequest(ctx, projectId, region).
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute() Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute()
if err != nil { if err != nil {
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err) return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
} }
// If the API returns no flavors, we have reached the end of the list. // If the API returns no flavors, we have reached the end of the list.
if len(res.Flavors) == 0 { if res.Flavors == nil || len(*res.Flavors) == 0 {
break break
} }
for _, flavor := range res.Flavors { for _, flavor := range *res.Flavors {
if filter(flavor) { if filter(flavor) {
result = append(result, flavor) result = append(result, flavor)
} }

View file

@ -1,11 +1,11 @@
package postgresflexalphaflavor package postgresFlexAlphaFlavor
/*
import ( import (
"context" "context"
"testing" "testing"
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "github.com/stackitcloud/stackit-sdk-go/core/utils"
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
) )
type mockRequest struct { type mockRequest struct {
@ -29,25 +29,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
return m.executeRequest() return m.executeRequest()
} }
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) { var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
if page == 1 { if page == 1 {
return &postgresflex.GetFlavorsResponse{ return &postgresflex.GetFlavorsResponse{
Flavors: []postgresflex.ListFlavors{ Flavors: &[]postgresflex.ListFlavors{
{Id: "flavor-1", Description: "first"}, {Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
{Id: "flavor-2", Description: "second"}, {Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
}, },
}, nil }, nil
} }
if page == 2 { if page == 2 {
return &postgresflex.GetFlavorsResponse{ return &postgresflex.GetFlavorsResponse{
Flavors: []postgresflex.ListFlavors{ Flavors: &[]postgresflex.ListFlavors{
{Id: "flavor-3", Description: "three"}, {Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
}, },
}, nil }, nil
} }
return &postgresflex.GetFlavorsResponse{ return &postgresflex.GetFlavorsResponse{
Flavors: []postgresflex.ListFlavors{}, Flavors: &[]postgresflex.ListFlavors{},
}, nil }, nil
} }
@ -71,7 +71,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
{ {
description: "Success - Filter flavors by description", description: "Success - Filter flavors by description",
projectId: "pid", region: "reg", projectId: "pid", region: "reg",
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" }, filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
wantCount: 1, wantCount: 1,
wantErr: false, wantErr: false,
}, },
@ -85,10 +85,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run( t.Run(
tt.description, func(t *testing.T) { tt.description, func(t *testing.T) {
var currentPage int32 var currentPage int64
client := &mockFlavorsClient{ client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest { executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
return mockRequest{ return &mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) { executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++ currentPage++
return mockResp(currentPage) return mockResp(currentPage)
@ -112,10 +112,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
} }
func TestGetAllFlavors(t *testing.T) { func TestGetAllFlavors(t *testing.T) {
var currentPage int32 var currentPage int64
client := &mockFlavorsClient{ client := &mockFlavorsClient{
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest { executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
return mockRequest{ return &mockRequest{
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) { executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
currentPage++ currentPage++
return mockResp(currentPage) return mockResp(currentPage)
@ -132,4 +132,3 @@ func TestGetAllFlavors(t *testing.T) {
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res)) t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
} }
} }
*/

View file

@ -5,8 +5,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen" postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
@ -26,7 +25,7 @@ func NewFlavorsDataSource() datasource.DataSource {
type dataSourceModel = postgresflexalphaGen.FlavorsModel type dataSourceModel = postgresflexalphaGen.FlavorsModel
type flavorsDataSource struct { type flavorsDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }

View file

@ -6,15 +6,13 @@ import (
"net/http" "net/http"
"github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-framework/types"
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen" postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils" postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
"github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-log/tflog"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils" "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
) )
@ -37,7 +35,7 @@ type dataSourceModel struct {
// instanceDataSource is the data source implementation. // instanceDataSource is the data source implementation.
type instanceDataSource struct { type instanceDataSource struct {
client *v3alpha1api.APIClient client *postgresflexalpha.APIClient
providerData core.ProviderData providerData core.ProviderData
} }
@ -96,7 +94,7 @@ func (r *instanceDataSource) Read(
ctx = tflog.SetField(ctx, "project_id", projectId) ctx = tflog.SetField(ctx, "project_id", projectId)
ctx = tflog.SetField(ctx, "instance_id", instanceId) ctx = tflog.SetField(ctx, "instance_id", instanceId)
ctx = tflog.SetField(ctx, "region", region) ctx = tflog.SetField(ctx, "region", region)
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute() instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
if err != nil { if err != nil {
utils.LogError( utils.LogError(
ctx, ctx,

View file

@ -28,12 +28,10 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
}, },
"backup_schedule": schema.StringAttribute{ "backup_schedule": schema.StringAttribute{
Computed: true, Computed: true,
Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.", Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.", MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
}, },
"connection_info": schema.SingleNestedAttribute{ "connection_info": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{
"write": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
"host": schema.StringAttribute{ "host": schema.StringAttribute{
Computed: true, Computed: true,
@ -46,24 +44,14 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
MarkdownDescription: "The port of the instance.", MarkdownDescription: "The port of the instance.",
}, },
}, },
CustomType: WriteType{
ObjectType: types.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
},
Computed: true,
Description: "The DNS name and port in the instance overview",
MarkdownDescription: "The DNS name and port in the instance overview",
},
},
CustomType: ConnectionInfoType{ CustomType: ConnectionInfoType{
ObjectType: types.ObjectType{ ObjectType: types.ObjectType{
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx), AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
}, },
}, },
Computed: true, Computed: true,
Description: "The connection information of the instance", Description: "The DNS name and port in the instance overview",
MarkdownDescription: "The connection information of the instance", MarkdownDescription: "The DNS name and port in the instance overview",
}, },
"encryption": schema.SingleNestedAttribute{ "encryption": schema.SingleNestedAttribute{
Attributes: map[string]schema.Attribute{ Attributes: map[string]schema.Attribute{
@ -255,22 +243,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
attributes := in.Attributes() attributes := in.Attributes()
writeAttribute, ok := attributes["write"] hostAttribute, ok := attributes["host"]
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Missing", "Attribute Missing",
`write is missing from object`) `host is missing from object`)
return nil, diags return nil, diags
} }
writeVal, ok := writeAttribute.(basetypes.ObjectValue) hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -278,7 +284,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
} }
return ConnectionInfoValue{ return ConnectionInfoValue{
Write: writeVal, Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown, state: attr.ValueStateKnown,
}, diags }, diags
} }
@ -346,22 +353,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
return NewConnectionInfoValueUnknown(), diags return NewConnectionInfoValueUnknown(), diags
} }
writeAttribute, ok := attributes["write"] hostAttribute, ok := attributes["host"]
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Missing", "Attribute Missing",
`write is missing from object`) `host is missing from object`)
return NewConnectionInfoValueUnknown(), diags return NewConnectionInfoValueUnknown(), diags
} }
writeVal, ok := writeAttribute.(basetypes.ObjectValue) hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok { if !ok {
diags.AddError( diags.AddError(
"Attribute Wrong Type", "Attribute Wrong Type",
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute)) fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return NewConnectionInfoValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
} }
if diags.HasError() { if diags.HasError() {
@ -369,7 +394,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
} }
return ConnectionInfoValue{ return ConnectionInfoValue{
Write: writeVal, Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown, state: attr.ValueStateKnown,
}, diags }, diags
} }
@ -442,401 +468,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
var _ basetypes.ObjectValuable = ConnectionInfoValue{} var _ basetypes.ObjectValuable = ConnectionInfoValue{}
type ConnectionInfoValue struct { type ConnectionInfoValue struct {
Write basetypes.ObjectValue `tfsdk:"write"`
state attr.ValueState
}
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 1)
var val tftypes.Value
var err error
attrTypes["write"] = basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
}.TerraformType(ctx)
objectType := tftypes.Object{AttributeTypes: attrTypes}
switch v.state {
case attr.ValueStateKnown:
vals := make(map[string]tftypes.Value, 1)
val, err = v.Write.ToTerraformValue(ctx)
if err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
vals["write"] = val
if err := tftypes.ValidateValue(objectType, vals); err != nil {
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
}
return tftypes.NewValue(objectType, vals), nil
case attr.ValueStateNull:
return tftypes.NewValue(objectType, nil), nil
case attr.ValueStateUnknown:
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
default:
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
}
}
func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull
}
func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown
}
func (v ConnectionInfoValue) String() string {
return "ConnectionInfoValue"
}
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics
var write basetypes.ObjectValue
if v.Write.IsNull() {
write = types.ObjectNull(
WriteValue{}.AttributeTypes(ctx),
)
}
if v.Write.IsUnknown() {
write = types.ObjectUnknown(
WriteValue{}.AttributeTypes(ctx),
)
}
if !v.Write.IsNull() && !v.Write.IsUnknown() {
write = types.ObjectValueMust(
WriteValue{}.AttributeTypes(ctx),
v.Write.Attributes(),
)
}
attributeTypes := map[string]attr.Type{
"write": basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
}
if v.IsNull() {
return types.ObjectNull(attributeTypes), diags
}
if v.IsUnknown() {
return types.ObjectUnknown(attributeTypes), diags
}
objVal, diags := types.ObjectValue(
attributeTypes,
map[string]attr.Value{
"write": write,
})
return objVal, diags
}
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
other, ok := o.(ConnectionInfoValue)
if !ok {
return false
}
if v.state != other.state {
return false
}
if v.state != attr.ValueStateKnown {
return true
}
if !v.Write.Equal(other.Write) {
return false
}
return true
}
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
return ConnectionInfoType{
basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx),
},
}
}
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{
"write": basetypes.ObjectType{
AttrTypes: WriteValue{}.AttributeTypes(ctx),
},
}
}
var _ basetypes.ObjectTypable = WriteType{}
type WriteType struct {
basetypes.ObjectType
}
func (t WriteType) Equal(o attr.Type) bool {
other, ok := o.(WriteType)
if !ok {
return false
}
return t.ObjectType.Equal(other.ObjectType)
}
func (t WriteType) String() string {
return "WriteType"
}
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
var diags diag.Diagnostics
attributes := in.Attributes()
hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
`host is missing from object`)
return nil, diags
}
hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return nil, diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
return nil, diags
}
return WriteValue{
Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
func NewWriteValueNull() WriteValue {
return WriteValue{
state: attr.ValueStateNull,
}
}
func NewWriteValueUnknown() WriteValue {
return WriteValue{
state: attr.ValueStateUnknown,
}
}
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
var diags diag.Diagnostics
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
ctx := context.Background()
for name, attributeType := range attributeTypes {
attribute, ok := attributes[name]
if !ok {
diags.AddError(
"Missing WriteValue Attribute Value",
"While creating a WriteValue value, a missing attribute value was detected. "+
"A WriteValue must contain values for all attributes, even if null or unknown. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
)
continue
}
if !attributeType.Equal(attribute.Type(ctx)) {
diags.AddError(
"Invalid WriteValue Attribute Type",
"While creating a WriteValue value, an invalid attribute value was detected. "+
"A WriteValue must use a matching attribute type for the value. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
)
}
}
for name := range attributes {
_, ok := attributeTypes[name]
if !ok {
diags.AddError(
"Extra WriteValue Attribute Value",
"While creating a WriteValue value, an extra attribute value was detected. "+
"A WriteValue must not contain values beyond the expected attribute types. "+
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
)
}
}
if diags.HasError() {
return NewWriteValueUnknown(), diags
}
hostAttribute, ok := attributes["host"]
if !ok {
diags.AddError(
"Attribute Missing",
`host is missing from object`)
return NewWriteValueUnknown(), diags
}
hostVal, ok := hostAttribute.(basetypes.StringValue)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
}
portAttribute, ok := attributes["port"]
if !ok {
diags.AddError(
"Attribute Missing",
`port is missing from object`)
return NewWriteValueUnknown(), diags
}
portVal, ok := portAttribute.(basetypes.Int64Value)
if !ok {
diags.AddError(
"Attribute Wrong Type",
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
}
if diags.HasError() {
return NewWriteValueUnknown(), diags
}
return WriteValue{
Host: hostVal,
Port: portVal,
state: attr.ValueStateKnown,
}, diags
}
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
object, diags := NewWriteValue(attributeTypes, attributes)
if diags.HasError() {
// This could potentially be added to the diag package.
diagsStrings := make([]string, 0, len(diags))
for _, diagnostic := range diags {
diagsStrings = append(diagsStrings, fmt.Sprintf(
"%s | %s | %s",
diagnostic.Severity(),
diagnostic.Summary(),
diagnostic.Detail()))
}
panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
}
return object
}
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
if in.Type() == nil {
return NewWriteValueNull(), nil
}
if !in.Type().Equal(t.TerraformType(ctx)) {
return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
}
if !in.IsKnown() {
return NewWriteValueUnknown(), nil
}
if in.IsNull() {
return NewWriteValueNull(), nil
}
attributes := map[string]attr.Value{}
val := map[string]tftypes.Value{}
err := in.As(&val)
if err != nil {
return nil, err
}
for k, v := range val {
a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
if err != nil {
return nil, err
}
attributes[k] = a
}
return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
}
func (t WriteType) ValueType(ctx context.Context) attr.Value {
return WriteValue{}
}
var _ basetypes.ObjectValuable = WriteValue{}
type WriteValue struct {
Host basetypes.StringValue `tfsdk:"host"` Host basetypes.StringValue `tfsdk:"host"`
Port basetypes.Int64Value `tfsdk:"port"` Port basetypes.Int64Value `tfsdk:"port"`
state attr.ValueState state attr.ValueState
} }
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) { func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
attrTypes := make(map[string]tftypes.Type, 2) attrTypes := make(map[string]tftypes.Type, 2)
var val tftypes.Value var val tftypes.Value
@ -881,19 +518,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
} }
} }
func (v WriteValue) IsNull() bool { func (v ConnectionInfoValue) IsNull() bool {
return v.state == attr.ValueStateNull return v.state == attr.ValueStateNull
} }
func (v WriteValue) IsUnknown() bool { func (v ConnectionInfoValue) IsUnknown() bool {
return v.state == attr.ValueStateUnknown return v.state == attr.ValueStateUnknown
} }
func (v WriteValue) String() string { func (v ConnectionInfoValue) String() string {
return "WriteValue" return "ConnectionInfoValue"
} }
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) { func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
var diags diag.Diagnostics var diags diag.Diagnostics
attributeTypes := map[string]attr.Type{ attributeTypes := map[string]attr.Type{
@ -919,8 +556,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
return objVal, diags return objVal, diags
} }
func (v WriteValue) Equal(o attr.Value) bool { func (v ConnectionInfoValue) Equal(o attr.Value) bool {
other, ok := o.(WriteValue) other, ok := o.(ConnectionInfoValue)
if !ok { if !ok {
return false return false
@ -945,15 +582,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
return true return true
} }
func (v WriteValue) Type(ctx context.Context) attr.Type { func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
return WriteType{ return ConnectionInfoType{
basetypes.ObjectType{ basetypes.ObjectType{
AttrTypes: v.AttributeTypes(ctx), AttrTypes: v.AttributeTypes(ctx),
}, },
} }
} }
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type { func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
return map[string]attr.Type{ return map[string]attr.Type{
"host": basetypes.StringType{}, "host": basetypes.StringType{},
"port": basetypes.Int64Type{}, "port": basetypes.Int64Type{},

Some files were not shown because too many files have changed in this diff Show more