Compare commits
83 commits
alpha
...
chore/upda
| Author | SHA1 | Date | |
|---|---|---|---|
| 0326739bfb | |||
|
|
b920e88324 | ||
|
|
5931225e1f | ||
|
|
9340825210 | ||
|
|
43cc14e499 | ||
|
|
d2051874c8 | ||
|
|
042652c9da | ||
|
|
d78396445e | ||
|
|
344639a908 | ||
|
|
8e90aca4dd | ||
|
|
b29ba624fa | ||
|
|
ea0a749cd8 | ||
|
|
d971a470dc | ||
|
|
f58d0949fe | ||
|
|
1eeb188e1e | ||
|
|
6fe178910d | ||
|
|
944ced15f3 | ||
|
|
3309489612 | ||
|
|
0ba4612438 | ||
|
|
8c490da5ce | ||
|
|
eecf98f4b1 | ||
|
|
30e4ce6b2d | ||
|
|
50c44c75ae | ||
|
|
0d141a289c | ||
|
|
5975b2ba97 | ||
|
|
731957fcd0 | ||
|
|
80a69e264f | ||
|
|
f6e6f968d8 | ||
|
|
afe4d490e7 | ||
|
|
346538b09b | ||
|
|
68635670fb | ||
|
|
98b62e4c80 | ||
|
|
1f4ecb11e8 | ||
|
|
f5d7b26d07 | ||
|
|
178be40665 | ||
|
|
5eb38b1cda | ||
|
|
20f08d6ff4 | ||
|
|
f783c566fb | ||
|
|
4f10919818 | ||
|
|
3d04e1497b | ||
|
|
3949ec9935 | ||
|
|
1c756e3fe0 | ||
|
|
9b5db499b6 | ||
|
|
aa1cfaf5b0 | ||
|
|
ad463ebad4 | ||
|
|
939d1c117c | ||
|
|
3b30ca1cd7 | ||
|
|
73e7e9e6b0 | ||
|
|
b6a66358d9 | ||
|
|
2c1c10e402 | ||
|
|
3182aa29b6 | ||
|
|
53f4e4bd49 | ||
|
|
c8ea125bac | ||
|
|
ab9ff41b24 | ||
|
|
b9b2bc31bd | ||
|
|
169026b6d2 | ||
|
|
c7260e04e2 | ||
|
|
826bb5b36a | ||
|
|
ca0f646526 | ||
|
|
976d384bcf | ||
|
|
a203e6ff93 | ||
|
|
7f5802aff0 | ||
|
|
d6d3a795bb | ||
|
|
411e99739a | ||
|
|
872c06ec68 | ||
|
|
d951dab630 | ||
|
|
996523df38 | ||
|
|
5427b54995 | ||
|
|
143df848ed | ||
|
|
e3440bb664 | ||
|
|
67f1f940cb | ||
|
|
67a6017637 | ||
|
|
79ca4b2ea0 | ||
|
|
aaad987d6d | ||
|
|
452f077da2 | ||
|
|
7701171b6d | ||
|
|
5958474f9a | ||
|
|
55427f7a48 | ||
|
|
c15f21a16d | ||
|
|
a63ae1831d | ||
|
|
c63d1018da | ||
|
|
469ed9e056 | ||
|
|
cc08fca97a |
145 changed files with 5929 additions and 5298 deletions
220
.github/actions/acc_test/action.yaml
vendored
220
.github/actions/acc_test/action.yaml
vendored
|
|
@ -2,6 +2,11 @@ name: Acceptance Testing
|
||||||
description: "Acceptance Testing pipeline"
|
description: "Acceptance Testing pipeline"
|
||||||
|
|
||||||
inputs:
|
inputs:
|
||||||
|
test_timeout_string:
|
||||||
|
description: "string that determines the timeout (default: 45m)"
|
||||||
|
default: '45m'
|
||||||
|
required: true
|
||||||
|
|
||||||
go-version:
|
go-version:
|
||||||
description: "go version to install"
|
description: "go version to install"
|
||||||
default: '1.25'
|
default: '1.25'
|
||||||
|
|
@ -11,38 +16,78 @@ inputs:
|
||||||
description: "STACKIT project ID for tests"
|
description: "STACKIT project ID for tests"
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
project_user_email:
|
||||||
|
required: true
|
||||||
|
description: "project user email for acc testing"
|
||||||
|
|
||||||
|
tf_acc_kek_key_id:
|
||||||
|
description: "KEK key ID"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_acc_kek_key_ring_id:
|
||||||
|
description: "KEK key ring ID"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_acc_kek_key_version:
|
||||||
|
description: "KEK key version"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_acc_kek_service_account:
|
||||||
|
description: "KEK service account email"
|
||||||
|
required: true
|
||||||
|
|
||||||
region:
|
region:
|
||||||
description: "STACKIT region for tests"
|
description: "STACKIT region for tests"
|
||||||
default: 'eu01'
|
default: 'eu01'
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
service_account_json:
|
service_account_json_content:
|
||||||
description: "STACKIT service account JSON file contents"
|
description: "STACKIT service account JSON file contents"
|
||||||
required: true
|
required: true
|
||||||
|
default: ""
|
||||||
|
|
||||||
|
service_account_json_content_b64:
|
||||||
|
description: "STACKIT service account JSON file contents"
|
||||||
|
required: true
|
||||||
|
default: ""
|
||||||
|
|
||||||
|
service_account_json_file_path:
|
||||||
|
description: "STACKIT service account JSON file contents"
|
||||||
|
required: true
|
||||||
|
default: 'service_account.json'
|
||||||
|
|
||||||
test_file:
|
test_file:
|
||||||
description: "testfile to run"
|
description: "testfile to run"
|
||||||
default: ''
|
default: ''
|
||||||
|
|
||||||
outputs:
|
|
||||||
random-number:
|
#outputs:
|
||||||
description: "Random number"
|
# random-number:
|
||||||
value: ${{ steps.random-number-generator.outputs.random-number }}
|
# description: "Random number"
|
||||||
|
# value: ${{ steps.random-number-generator.outputs.random-number }}
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
- name: Random Number Generator
|
# - name: Random Number Generator
|
||||||
id: random-number-generator
|
# id: random-number-generator
|
||||||
run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
|
# run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
|
||||||
shell: bash
|
# shell: bash
|
||||||
|
|
||||||
- name: Install needed tools
|
- name: Install needed tools
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::apt install"
|
||||||
set -e
|
set -e
|
||||||
apt-get -y -qq update
|
apt-get -y -qq update >apt_update.log 2>apt_update_err.log
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_update.log apt_update_err.log
|
||||||
|
fi
|
||||||
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget >apt_get.log 2>apt_get_err.log
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_get.log apt_get_err.log
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Setup JAVA
|
- name: Setup JAVA
|
||||||
uses: actions/setup-java@v5
|
uses: actions/setup-java@v5
|
||||||
|
|
@ -53,62 +98,165 @@ runs:
|
||||||
- name: Install Go ${{ inputs.go-version }}
|
- name: Install Go ${{ inputs.go-version }}
|
||||||
uses: actions/setup-go@v6
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ inputs.go-version }}
|
# go-version: ${{ inputs.go-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
go-version-file: 'go.mod'
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Determine GOMODCACHE
|
||||||
|
shell: bash
|
||||||
|
id: goenv
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Restore cached GO pkg
|
||||||
|
id: cache-gopkg
|
||||||
|
uses: actions/cache/restore@v5
|
||||||
|
with:
|
||||||
|
path: "${{ steps.goenv.outputs.gomodcache }}"
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
- name: Install go tools
|
- name: Install go tools
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::go install"
|
||||||
set -e
|
set -e
|
||||||
go mod download
|
go mod download
|
||||||
go install golang.org/x/tools/cmd/goimports@latest
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
|
||||||
|
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
|
||||||
|
echo "::endgroup::"
|
||||||
|
- name: Run go mod tidy
|
||||||
|
shell: bash
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
- name: Prepare pkg_gen directory
|
- name: Save GO package Cache
|
||||||
|
id: cache-gopkg-save
|
||||||
|
uses: actions/cache/save@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
- name: Creating service_account file from json input
|
||||||
|
if: inputs.service_account_json_content != ''
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
go run cmd/main.go build -p
|
echo "::group::create service account file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
jsonFile="${{ inputs.service_account_json_file_path }}"
|
||||||
|
jsonFile="${jsonFile:-x}"
|
||||||
|
if [ "${jsonFile}" == "x" ]; then
|
||||||
|
echo "no service account file path provided"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ ! -f "${jsonFile}" ]; then
|
||||||
|
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
|
||||||
|
echo "${{ inputs.service_account_json_content }}" > stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
fi
|
||||||
|
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Creating service_account file from base64 json input
|
||||||
|
if: inputs.service_account_json_content_b64 != ''
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "::group::create service account file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
jsonFile="${{ inputs.service_account_json_file_path }}"
|
||||||
|
jsonFile="${jsonFile:-x}"
|
||||||
|
if [ "${jsonFile}" == "x" ]; then
|
||||||
|
echo "no service account file path provided"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ ! -f "${jsonFile}" ]; then
|
||||||
|
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
|
||||||
|
echo "${{ inputs.service_account_json_content_b64 }}" | base64 -d > stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
fi
|
||||||
|
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Run acceptance test file
|
- name: Run acceptance test file
|
||||||
if: ${{ inputs.test_file != '' }}
|
if: ${{ inputs.test_file != '' }}
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::go test file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
echo "Running acceptance tests for the terraform provider"
|
echo "Running acceptance tests for the terraform provider"
|
||||||
echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
|
cd stackit || exit 1
|
||||||
cd stackit
|
|
||||||
TF_ACC=1 \
|
TF_ACC=1 \
|
||||||
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
||||||
TF_ACC_REGION=${TF_ACC_REGION} \
|
TF_ACC_REGION=${TF_ACC_REGION} \
|
||||||
go test ${{ inputs.test_file }} -count=1 -timeout=30m
|
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
|
||||||
|
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
|
||||||
|
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
||||||
|
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
||||||
|
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
||||||
|
go test ${{ inputs.test_file }} -count=1 -timeout=${{ inputs.test_timeout_string }}
|
||||||
|
echo "::endgroup::"
|
||||||
env:
|
env:
|
||||||
STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
|
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
TF_PROJECT_ID: ${{ inputs.project_id }}
|
|
||||||
TF_ACC_REGION: ${{ inputs.region }}
|
TF_ACC_REGION: ${{ inputs.region }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
|
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
|
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
|
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
|
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
|
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
|
||||||
|
# - name: Run test action
|
||||||
|
# if: ${{ inputs.test_file == '' }}
|
||||||
|
# env:
|
||||||
|
# TF_ACC: 1
|
||||||
|
# TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
|
# TF_ACC_REGION: ${{ inputs.region }}
|
||||||
|
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
||||||
|
# TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
||||||
|
# TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
|
# TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
|
# TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
# TF_ACC_SERVICE_ACCOUNT_FILE: "${PWD}/${{ inputs.service_account_json_file_path }}"
|
||||||
|
# uses: robherley/go-test-action@v0.1.0
|
||||||
|
# with:
|
||||||
|
# testArguments: "./... -timeout 45m"
|
||||||
|
|
||||||
- name: Run acceptance tests
|
- name: Run acceptance tests
|
||||||
if: ${{ inputs.test_file == '' }}
|
if: ${{ inputs.test_file == '' }}
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::go test all"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
echo "Running acceptance tests for the terraform provider"
|
echo "Running acceptance tests for the terraform provider"
|
||||||
echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
|
cd stackit || exit 1
|
||||||
cd stackit
|
|
||||||
TF_ACC=1 \
|
TF_ACC=1 \
|
||||||
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
||||||
TF_ACC_REGION=${TF_ACC_REGION} \
|
TF_ACC_REGION=${TF_ACC_REGION} \
|
||||||
go test ./... -count=1 -timeout=30m
|
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
|
||||||
|
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
|
||||||
|
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
||||||
|
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
||||||
|
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
||||||
|
go test ./... -count=1 -timeout=${{ inputs.test_timeout_string }}
|
||||||
|
echo "::endgroup::"
|
||||||
env:
|
env:
|
||||||
STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
|
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
TF_PROJECT_ID: ${{ inputs.project_id }}
|
|
||||||
TF_ACC_REGION: ${{ inputs.region }}
|
TF_ACC_REGION: ${{ inputs.region }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
|
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
|
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
|
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
|
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
|
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
|
|
||||||
54
.github/actions/build/action.yaml
vendored
54
.github/actions/build/action.yaml
vendored
|
|
@ -20,25 +20,63 @@ runs:
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
apt-get -y -qq update
|
apt-get -y -qq update
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Install Go ${{ inputs.go-version }}
|
- name: Install Go ${{ inputs.go-version }}
|
||||||
uses: actions/setup-go@v6
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ inputs.go-version }}
|
# go-version: ${{ inputs.go-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
go-version-file: 'go.mod'
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Determine GOMODCACHE
|
||||||
|
shell: bash
|
||||||
|
id: goenv
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
|
||||||
|
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Restore cached GO pkg
|
||||||
|
id: cache-gopkg
|
||||||
|
uses: actions/cache/restore@v5
|
||||||
|
with:
|
||||||
|
path: "${{ steps.goenv.outputs.gomodcache }}"
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
- name: Install go tools
|
- name: Install go tools
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
go install golang.org/x/tools/cmd/goimports@latest
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
|
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
|
||||||
|
|
||||||
|
# - name: Run build pkg directory
|
||||||
|
# shell: bash
|
||||||
|
# run: |
|
||||||
|
# set -e
|
||||||
|
# go run generator/main.go build
|
||||||
|
|
||||||
|
- name: Get all go packages
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
go get ./...
|
||||||
|
|
||||||
|
- name: Save Cache
|
||||||
|
id: cache-gopkg-save
|
||||||
|
uses: actions/cache/save@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
|
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
|
||||||
uses: actions/setup-java@v5
|
uses: actions/setup-java@v5
|
||||||
|
|
@ -46,16 +84,6 @@ runs:
|
||||||
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
|
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
|
||||||
java-version: ${{ inputs.java-version }}
|
java-version: ${{ inputs.java-version }}
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run build pkg directory
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
set -e
|
|
||||||
go run cmd/main.go build
|
|
||||||
|
|
||||||
|
|
||||||
- name: Run make to build app
|
- name: Run make to build app
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|
|
||||||
4
.github/actions/setup-cache-go/action.yaml
vendored
4
.github/actions/setup-cache-go/action.yaml
vendored
|
|
@ -26,9 +26,9 @@ runs:
|
||||||
uses: https://code.forgejo.org/actions/setup-go@v6
|
uses: https://code.forgejo.org/actions/setup-go@v6
|
||||||
id: go-version
|
id: go-version
|
||||||
with:
|
with:
|
||||||
go-version: ${{ inputs.go-version }}
|
# go-version: ${{ inputs.go-version }}
|
||||||
check-latest: true # Always check for the latest patch release
|
check-latest: true # Always check for the latest patch release
|
||||||
# go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
# do not cache dependencies, we do this manually
|
# do not cache dependencies, we do this manually
|
||||||
cache: false
|
cache: false
|
||||||
|
|
||||||
|
|
|
||||||
44
.github/workflows/ci.yaml
vendored
44
.github/workflows/ci.yaml
vendored
|
|
@ -234,29 +234,29 @@ jobs:
|
||||||
run: make lint
|
run: make lint
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
|
|
||||||
# - name: Testing
|
# - name: Testing
|
||||||
# run: make test
|
# run: make test
|
||||||
#
|
#
|
||||||
# - name: Acceptance Testing
|
# - name: Acceptance Testing
|
||||||
# if: ${{ github.event_name == 'pull_request' }}
|
# if: ${{ github.event_name == 'pull_request' }}
|
||||||
# run: make test-acceptance-tf
|
# run: make test-acceptance-tf
|
||||||
#
|
#
|
||||||
# - name: Check coverage threshold
|
# - name: Check coverage threshold
|
||||||
# shell: bash
|
# shell: bash
|
||||||
# run: |
|
# run: |
|
||||||
# make coverage
|
# make coverage
|
||||||
# COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
|
# COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
|
||||||
# echo "Coverage: $COVERAGE%"
|
# echo "Coverage: $COVERAGE%"
|
||||||
# if (( $(echo "$COVERAGE < 80" | bc -l) )); then
|
# if (( $(echo "$COVERAGE < 80" | bc -l) )); then
|
||||||
# echo "Coverage is below 80%"
|
# echo "Coverage is below 80%"
|
||||||
# # exit 1
|
# # exit 1
|
||||||
# fi
|
# fi
|
||||||
|
|
||||||
# - name: Archive code coverage results
|
# - name: Archive code coverage results
|
||||||
# uses: actions/upload-artifact@v4
|
# uses: actions/upload-artifact@v4
|
||||||
# with:
|
# with:
|
||||||
# name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
# name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
||||||
# path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
|
# path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
|
||||||
|
|
||||||
config:
|
config:
|
||||||
if: ${{ github.event_name != 'schedule' }}
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
|
|
|
||||||
328
.github/workflows/ci_new.yaml
vendored
Normal file
328
.github/workflows/ci_new.yaml
vendored
Normal file
|
|
@ -0,0 +1,328 @@
|
||||||
|
name: CI Workflow
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- alpha
|
||||||
|
- main
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
# every sunday at 00:00
|
||||||
|
# - cron: '0 0 * * 0'
|
||||||
|
# every day at 00:00
|
||||||
|
- cron: '0 0 * * *'
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- '!main'
|
||||||
|
- '!alpha'
|
||||||
|
paths:
|
||||||
|
- '!.github'
|
||||||
|
|
||||||
|
env:
|
||||||
|
GO_VERSION: "1.25"
|
||||||
|
CODE_COVERAGE_FILE_NAME: "coverage.out" # must be the same as in Makefile
|
||||||
|
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
config:
|
||||||
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
|
name: Check GoReleaser config
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Check GoReleaser
|
||||||
|
uses: goreleaser/goreleaser-action@v7
|
||||||
|
with:
|
||||||
|
args: check
|
||||||
|
|
||||||
|
prepare:
|
||||||
|
name: Prepare GO cache
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read # Required to identify workflow run.
|
||||||
|
checks: write # Required to add status summary.
|
||||||
|
contents: read # Required to checkout repository.
|
||||||
|
pull-requests: write # Required to add PR comment.
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Install Go ${{ inputs.go-version }}
|
||||||
|
id: go-install
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
# go-version: ${{ inputs.go-version }}
|
||||||
|
check-latest: true
|
||||||
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Determine GOMODCACHE
|
||||||
|
shell: bash
|
||||||
|
id: goenv
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
|
||||||
|
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Restore cached GO pkg
|
||||||
|
id: cache-gopkg
|
||||||
|
uses: actions/cache/restore@v5
|
||||||
|
with:
|
||||||
|
path: "${{ steps.goenv.outputs.gomodcache }}"
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
- name: Install go tools
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
|
||||||
|
- name: Get all go packages
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
go get ./...
|
||||||
|
|
||||||
|
- name: Save Cache
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
id: cache-gopkg-save
|
||||||
|
uses: actions/cache/save@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
|
||||||
|
publish_test:
|
||||||
|
name: "Test readiness for publishing provider"
|
||||||
|
needs:
|
||||||
|
- config
|
||||||
|
- prepare
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read # Required to identify workflow run.
|
||||||
|
checks: write # Required to add status summary.
|
||||||
|
contents: read # Required to checkout repository.
|
||||||
|
pull-requests: write # Required to add PR comment.
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
run: |
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Setup Go
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
# go-version: ${{ env.GO_VERSION }}
|
||||||
|
check-latest: true
|
||||||
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Install go tools
|
||||||
|
run: |
|
||||||
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
|
||||||
|
- name: Setup JAVA
|
||||||
|
uses: actions/setup-java@v5
|
||||||
|
with:
|
||||||
|
distribution: 'temurin' # See 'Supported distributions' for available options
|
||||||
|
java-version: '21'
|
||||||
|
|
||||||
|
# - name: Run build pkg directory
|
||||||
|
# run: |
|
||||||
|
# go run generator/main.go build
|
||||||
|
|
||||||
|
- name: Set up s3cfg
|
||||||
|
run: |
|
||||||
|
cat <<'EOF' >> ~/.s3cfg
|
||||||
|
[default]
|
||||||
|
host_base = https://object.storage.eu01.onstackit.cloud
|
||||||
|
host_bucket = https://%(bucket).object.storage.eu01.onstackit.cloud
|
||||||
|
check_ssl_certificate = False
|
||||||
|
access_key = ${{ secrets.S3_ACCESS_KEY }}
|
||||||
|
secret_key = ${{ secrets.S3_SECRET_KEY }}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Import GPG key
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.PRIVATE_KEY_PEM }}" > ~/private.key.pem
|
||||||
|
gpg --import ~/private.key.pem
|
||||||
|
rm ~/private.key.pem
|
||||||
|
|
||||||
|
- name: Run GoReleaser with SNAPSHOT
|
||||||
|
id: goreleaser
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
||||||
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
|
uses: goreleaser/goreleaser-action@v7
|
||||||
|
with:
|
||||||
|
args: release --skip publish --clean --snapshot
|
||||||
|
|
||||||
|
- name: Prepare key file
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
|
||||||
|
|
||||||
|
- name: Prepare provider directory structure
|
||||||
|
run: |
|
||||||
|
VERSION=$(jq -r .version < dist/metadata.json)
|
||||||
|
go run generator/main.go \
|
||||||
|
publish \
|
||||||
|
--namespace=mhenselin \
|
||||||
|
--providerName=stackitprivatepreview \
|
||||||
|
--repoName=terraform-provider-stackitprivatepreview \
|
||||||
|
--domain=tfregistry.sysops.stackit.rocks \
|
||||||
|
--gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
|
||||||
|
--gpgPubKeyFile=public_key.pem \
|
||||||
|
--version=${VERSION}
|
||||||
|
|
||||||
|
testing:
|
||||||
|
name: CI run tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- config
|
||||||
|
- prepare
|
||||||
|
env:
|
||||||
|
TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
|
TF_ACC_ORGANIZATION_ID: ${{ vars.TF_ACC_ORGANIZATION_ID }}
|
||||||
|
TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
|
||||||
|
TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
uses: ./.github/actions/build
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
|
||||||
|
- name: Setup Terraform
|
||||||
|
uses: hashicorp/setup-terraform@v2
|
||||||
|
with:
|
||||||
|
terraform_wrapper: false
|
||||||
|
|
||||||
|
- name: Create service account json file
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/.service_account.json
|
||||||
|
|
||||||
|
- name: Run go mod tidy
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
|
- name: Testing
|
||||||
|
run: |
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
||||||
|
export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
|
make test
|
||||||
|
|
||||||
|
- name: Acceptance Testing
|
||||||
|
env:
|
||||||
|
TF_ACC: "1"
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: |
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
||||||
|
export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
|
make test-acceptance-tf
|
||||||
|
|
||||||
|
- name: Check coverage threshold
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
make coverage
|
||||||
|
COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
|
||||||
|
echo "Coverage: $COVERAGE%"
|
||||||
|
if (( $(echo "$COVERAGE < 80" | bc -l) )); then
|
||||||
|
echo "Coverage is below 80%"
|
||||||
|
# exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Archive code coverage results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
||||||
|
path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
|
||||||
|
|
||||||
|
main:
|
||||||
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
|
name: CI run build and linting
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- config
|
||||||
|
- prepare
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
# - uses: actions/cache@v5
|
||||||
|
# id: cache
|
||||||
|
# with:
|
||||||
|
# path: path/to/dependencies
|
||||||
|
# key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||||
|
|
||||||
|
# - name: Install Dependencies
|
||||||
|
# if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
# run: /install.sh
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
uses: ./.github/actions/build
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
|
||||||
|
- name: Setup Terraform
|
||||||
|
uses: hashicorp/setup-terraform@v2
|
||||||
|
with:
|
||||||
|
terraform_wrapper: false
|
||||||
|
|
||||||
|
- name: "Ensure docs are up-to-date"
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: ./scripts/check-docs.sh
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: "Run go mod tidy"
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
|
- name: golangci-lint
|
||||||
|
uses: golangci/golangci-lint-action@v9
|
||||||
|
with:
|
||||||
|
version: v2.10
|
||||||
|
args: --config=.golang-ci.yaml --allow-parallel-runners --timeout=5m
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Linting terraform files
|
||||||
|
run: make lint-tf
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
code_coverage:
|
||||||
|
name: "Code coverage report"
|
||||||
|
if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- main
|
||||||
|
- prepare
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
actions: read # to download code coverage results from "main" job
|
||||||
|
pull-requests: write # write permission needed to comment on PR
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install sudo
|
||||||
|
|
||||||
|
- name: Check new code coverage
|
||||||
|
uses: fgrosse/go-coverage-report@v1.2.0
|
||||||
|
continue-on-error: true # Add this line to prevent pipeline failures in forks
|
||||||
|
with:
|
||||||
|
coverage-artifact-name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
||||||
|
coverage-file-name: ${{ env.CODE_COVERAGE_FILE_NAME }}
|
||||||
|
root-package: 'github.com/stackitcloud/terraform-provider-stackit'
|
||||||
16
.github/workflows/publish.yaml
vendored
16
.github/workflows/publish.yaml
vendored
|
|
@ -23,7 +23,7 @@ jobs:
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Check GoReleaser
|
- name: Check GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: check
|
args: check
|
||||||
|
|
||||||
|
|
@ -44,9 +44,11 @@ jobs:
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v6
|
uses: https://code.forgejo.org/actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ env.GO_VERSION }}
|
# go-version: ${{ env.GO_VERSION }}
|
||||||
|
check-latest: true
|
||||||
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
- name: Install go tools
|
- name: Install go tools
|
||||||
run: |
|
run: |
|
||||||
|
|
@ -68,7 +70,7 @@ jobs:
|
||||||
set -e
|
set -e
|
||||||
mkdir -p generated/services
|
mkdir -p generated/services
|
||||||
mkdir -p generated/internal/services
|
mkdir -p generated/internal/services
|
||||||
go run cmd/main.go build
|
go run generator/main.go build
|
||||||
|
|
||||||
- name: Set up s3cfg
|
- name: Set up s3cfg
|
||||||
run: |
|
run: |
|
||||||
|
|
@ -93,7 +95,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
||||||
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --skip publish --clean --snapshot
|
args: release --skip publish --clean --snapshot
|
||||||
|
|
||||||
|
|
@ -103,7 +105,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
||||||
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --skip publish --clean
|
args: release --skip publish --clean
|
||||||
|
|
||||||
|
|
@ -114,7 +116,7 @@ jobs:
|
||||||
- name: Prepare provider directory structure
|
- name: Prepare provider directory structure
|
||||||
run: |
|
run: |
|
||||||
VERSION=$(jq -r .version < dist/metadata.json)
|
VERSION=$(jq -r .version < dist/metadata.json)
|
||||||
go run cmd/main.go \
|
go run generator/main.go \
|
||||||
publish \
|
publish \
|
||||||
--namespace=mhenselin \
|
--namespace=mhenselin \
|
||||||
--providerName=stackitprivatepreview \
|
--providerName=stackitprivatepreview \
|
||||||
|
|
|
||||||
6
.github/workflows/release.yaml
vendored
6
.github/workflows/release.yaml
vendored
|
|
@ -22,17 +22,19 @@ jobs:
|
||||||
with:
|
with:
|
||||||
# Allow goreleaser to access older tag information.
|
# Allow goreleaser to access older tag information.
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-go@v5
|
|
||||||
|
- uses: https://code.forgejo.org/actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
cache: true
|
cache: true
|
||||||
|
|
||||||
- name: Import GPG key
|
- name: Import GPG key
|
||||||
uses: crazy-max/ghaction-import-gpg@v6
|
uses: crazy-max/ghaction-import-gpg@v6
|
||||||
id: import_gpg
|
id: import_gpg
|
||||||
with:
|
with:
|
||||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||||
- name: Run GoReleaser
|
- name: Run GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --clean
|
args: release --clean
|
||||||
env:
|
env:
|
||||||
|
|
|
||||||
10
.github/workflows/tf-acc-test.yaml
vendored
10
.github/workflows/tf-acc-test.yaml
vendored
|
|
@ -18,6 +18,12 @@ jobs:
|
||||||
uses: ./.github/actions/acc_test
|
uses: ./.github/actions/acc_test
|
||||||
with:
|
with:
|
||||||
go-version: ${{ env.GO_VERSION }}
|
go-version: ${{ env.GO_VERSION }}
|
||||||
project_id: ${{ vars.TEST_PROJECT_ID }}
|
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
region: 'eu01'
|
region: 'eu01'
|
||||||
service_account_json: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}
|
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
||||||
|
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
|
||||||
|
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
|
||||||
|
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
||||||
|
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
||||||
|
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
||||||
|
# service_account_json_file_path: "~/service_account.json"
|
||||||
|
|
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -40,6 +40,7 @@ coverage.out
|
||||||
coverage.html
|
coverage.html
|
||||||
generated
|
generated
|
||||||
stackit-sdk-generator
|
stackit-sdk-generator
|
||||||
|
stackit-sdk-generator/**
|
||||||
dist
|
dist
|
||||||
|
|
||||||
.secrets
|
.secrets
|
||||||
|
|
|
||||||
94
.golang-ci.yaml
Normal file
94
.golang-ci.yaml
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
version: "2"
|
||||||
|
run:
|
||||||
|
concurrency: 4
|
||||||
|
output:
|
||||||
|
formats:
|
||||||
|
text:
|
||||||
|
print-linter-name: true
|
||||||
|
print-issued-lines: true
|
||||||
|
colors: true
|
||||||
|
path: stdout
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- bodyclose
|
||||||
|
- depguard
|
||||||
|
- errorlint
|
||||||
|
- forcetypeassert
|
||||||
|
- gochecknoinits
|
||||||
|
- gocritic
|
||||||
|
- gosec
|
||||||
|
- misspell
|
||||||
|
- nakedret
|
||||||
|
- revive
|
||||||
|
- sqlclosecheck
|
||||||
|
- wastedassign
|
||||||
|
disable:
|
||||||
|
- noctx
|
||||||
|
- unparam
|
||||||
|
settings:
|
||||||
|
depguard:
|
||||||
|
rules:
|
||||||
|
main:
|
||||||
|
list-mode: lax
|
||||||
|
allow:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
|
- github.com/hashicorp/terraform-plugin-framework
|
||||||
|
- github.com/hashicorp/terraform-plugin-log
|
||||||
|
- github.com/stackitcloud/stackit-sdk-go
|
||||||
|
deny:
|
||||||
|
- pkg: github.com/stretchr/testify
|
||||||
|
desc: Do not use a testing framework
|
||||||
|
gocritic:
|
||||||
|
disabled-checks:
|
||||||
|
- wrapperFunc
|
||||||
|
- typeDefFirst
|
||||||
|
- ifElseChain
|
||||||
|
- dupImport
|
||||||
|
- hugeParam
|
||||||
|
enabled-tags:
|
||||||
|
- performance
|
||||||
|
- style
|
||||||
|
- experimental
|
||||||
|
gosec:
|
||||||
|
excludes:
|
||||||
|
- G104
|
||||||
|
- G102
|
||||||
|
- G304
|
||||||
|
- G307
|
||||||
|
misspell:
|
||||||
|
locale: US
|
||||||
|
nakedret:
|
||||||
|
max-func-lines: 0
|
||||||
|
revive:
|
||||||
|
severity: error
|
||||||
|
rules:
|
||||||
|
- name: errorf
|
||||||
|
- name: context-as-argument
|
||||||
|
- name: error-return
|
||||||
|
- name: increment-decrement
|
||||||
|
- name: indent-error-flow
|
||||||
|
- name: superfluous-else
|
||||||
|
- name: unused-parameter
|
||||||
|
- name: unreachable-code
|
||||||
|
- name: atomic
|
||||||
|
- name: empty-lines
|
||||||
|
- name: early-return
|
||||||
|
exclusions:
|
||||||
|
paths:
|
||||||
|
- generator/
|
||||||
|
generated: lax
|
||||||
|
warn-unused: true
|
||||||
|
# Excluding configuration per-path, per-linter, per-text and per-source.
|
||||||
|
rules:
|
||||||
|
# Exclude some linters from running on tests files.
|
||||||
|
- path: _test\.go
|
||||||
|
linters:
|
||||||
|
- gochecknoinits
|
||||||
|
formatters:
|
||||||
|
enable:
|
||||||
|
#- gofmt
|
||||||
|
- goimports
|
||||||
|
settings:
|
||||||
|
goimports:
|
||||||
|
local-prefixes:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
5
Makefile
5
Makefile
|
|
@ -12,12 +12,13 @@ project-tools:
|
||||||
# LINT
|
# LINT
|
||||||
lint-golangci-lint:
|
lint-golangci-lint:
|
||||||
@echo "Linting with golangci-lint"
|
@echo "Linting with golangci-lint"
|
||||||
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config golang-ci.yaml
|
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config .golang-ci.yaml
|
||||||
|
|
||||||
|
|
||||||
lint-tf:
|
lint-tf:
|
||||||
@echo "Linting terraform files"
|
@echo "Linting terraform files"
|
||||||
@terraform fmt -check -diff -recursive
|
@terraform fmt -check -diff -recursive examples/
|
||||||
|
@terraform fmt -check -diff -recursive stackit/
|
||||||
|
|
||||||
lint: lint-golangci-lint lint-tf
|
lint: lint-golangci-lint lint-tf
|
||||||
|
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
38
docs/data-sources/postgresflexalpha_database.md
Normal file
38
docs/data-sources/postgresflexalpha_database.md
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
database_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_id` (Number) The ID of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/postgresflexalpha_flavor.md
Normal file
54
docs/data-sources/postgresflexalpha_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||||
|
- `project_id` (String) The cpu count of the instance.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The flavor description.
|
||||||
|
- `storage_class` (String) The memory of the instance in Gibibyte.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The flavor id of the instance flavor.
|
||||||
|
- `id` (String) The terraform id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
68
docs/data-sources/postgresflexalpha_flavors.md
Normal file
68
docs/data-sources/postgresflexalpha_flavors.md
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_flavors Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_flavors (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the flavors to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `flavors` (Attributes List) List of flavors available for the project. (see [below for nested schema](#nestedatt--flavors))
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
|
||||||
|
<a id="nestedatt--flavors"></a>
|
||||||
|
### Nested Schema for `flavors`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `memory` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||||
|
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes))
|
||||||
|
- `tf_original_api_id` (String) The id of the instance flavor.
|
||||||
|
|
||||||
|
<a id="nestedatt--flavors--storage_classes"></a>
|
||||||
|
### Nested Schema for `flavors.storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
95
docs/data-sources/postgresflexalpha_instance.md
Normal file
95
docs/data-sources/postgresflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,95 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
|
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
|
||||||
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
|
⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||||
|
- `status` (String) The current status of the instance.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info"></a>
|
||||||
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info--write"></a>
|
||||||
|
### Nested Schema for `connection_info.write`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance.
|
||||||
|
- `port` (Number) The port of the instance.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The encryption-key key identifier
|
||||||
|
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||||
|
- `kek_key_version` (String) The encryption-key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `performance_class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
42
docs/data-sources/postgresflexalpha_user.md
Normal file
42
docs/data-sources/postgresflexalpha_user.md
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `name` (String) The name of the user.
|
||||||
|
- `roles` (List of String) A list of user roles.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
32
docs/data-sources/sqlserverflexalpha_database.md
Normal file
32
docs/data-sources/sqlserverflexalpha_database.md
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (String) The terraform internal identifier.
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
77
docs/data-sources/sqlserverflexalpha_instance.md
Normal file
77
docs/data-sources/sqlserverflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `status` (String)
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
62
docs/data-sources/sqlserverflexalpha_user.md
Normal file
62
docs/data-sources/sqlserverflexalpha_user.md
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
40
docs/data-sources/sqlserverflexbeta_database.md
Normal file
40
docs/data-sources/sqlserverflexbeta_database.md
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
database_name = "dbname"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (String) The terraform internal identifier.
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
77
docs/data-sources/sqlserverflexbeta_instance.md
Normal file
77
docs/data-sources/sqlserverflexbeta_instance.md
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `status` (String)
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
83
docs/index.md
Normal file
83
docs/index.md
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview Provider"
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview Provider
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = "service_account.json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Authentication
|
||||||
|
|
||||||
|
# Key flow
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key = var.service_account_key
|
||||||
|
private_key = var.private_key
|
||||||
|
}
|
||||||
|
|
||||||
|
# Key flow (using path)
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = var.service_account_key_path
|
||||||
|
private_key_path = var.private_key_path
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `authorization_custom_endpoint` (String) Custom endpoint for the Membership service
|
||||||
|
- `cdn_custom_endpoint` (String) Custom endpoint for the CDN service
|
||||||
|
- `credentials_path` (String) Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.
|
||||||
|
- `default_region` (String) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||||
|
- `dns_custom_endpoint` (String) Custom endpoint for the DNS service
|
||||||
|
- `enable_beta_resources` (Boolean) Enable beta resources. Default is false.
|
||||||
|
- `experiments` (List of String) Enables experiments. These are unstable features without official support. More information can be found in the README. Available Experiments: iam, routing-tables, network
|
||||||
|
- `git_custom_endpoint` (String) Custom endpoint for the Git service
|
||||||
|
- `iaas_custom_endpoint` (String) Custom endpoint for the IaaS service
|
||||||
|
- `kms_custom_endpoint` (String) Custom endpoint for the KMS service
|
||||||
|
- `loadbalancer_custom_endpoint` (String) Custom endpoint for the Load Balancer service
|
||||||
|
- `logme_custom_endpoint` (String) Custom endpoint for the LogMe service
|
||||||
|
- `mariadb_custom_endpoint` (String) Custom endpoint for the MariaDB service
|
||||||
|
- `modelserving_custom_endpoint` (String) Custom endpoint for the AI Model Serving service
|
||||||
|
- `mongodbflex_custom_endpoint` (String) Custom endpoint for the MongoDB Flex service
|
||||||
|
- `objectstorage_custom_endpoint` (String) Custom endpoint for the Object Storage service
|
||||||
|
- `observability_custom_endpoint` (String) Custom endpoint for the Observability service
|
||||||
|
- `opensearch_custom_endpoint` (String) Custom endpoint for the OpenSearch service
|
||||||
|
- `postgresflex_custom_endpoint` (String) Custom endpoint for the PostgresFlex service
|
||||||
|
- `private_key` (String) Private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||||
|
- `private_key_path` (String) Path for the private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||||
|
- `rabbitmq_custom_endpoint` (String) Custom endpoint for the RabbitMQ service
|
||||||
|
- `redis_custom_endpoint` (String) Custom endpoint for the Redis service
|
||||||
|
- `region` (String, Deprecated) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||||
|
- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
|
||||||
|
- `scf_custom_endpoint` (String) Custom endpoint for the Cloud Foundry (SCF) service
|
||||||
|
- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
|
||||||
|
- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
|
||||||
|
- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service
|
||||||
|
- `service_account_custom_endpoint` (String) Custom endpoint for the Service Account service
|
||||||
|
- `service_account_email` (String, Deprecated) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
|
||||||
|
- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
- `service_account_token` (String, Deprecated) Token used for authentication. If set, the token flow will be used to authenticate all operations.
|
||||||
|
- `service_enablement_custom_endpoint` (String) Custom endpoint for the Service Enablement API
|
||||||
|
- `ske_custom_endpoint` (String) Custom endpoint for the Kubernetes Engine (SKE) service
|
||||||
|
- `sqlserverflex_custom_endpoint` (String) Custom endpoint for the SQL Server Flex service
|
||||||
|
- `token_custom_endpoint` (String) Custom endpoint for the token API, which is used to request access tokens when using the key flow
|
||||||
57
docs/resources/postgresflexalpha_database.md
Normal file
57
docs/resources/postgresflexalpha_database.md
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "mydb"
|
||||||
|
owner = "myusername"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project_id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance_id"
|
||||||
|
database_id = "database_id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `database_id` (Number) The ID of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
138
docs/resources/postgresflexalpha_instance.md
Normal file
138
docs/resources/postgresflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,138 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 30
|
||||||
|
flavor_id = "flavor.id"
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = "service@account.email"
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
instance_id = var.postgres_instance_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
|
⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `status` (String) The current status of the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `performance_class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The encryption-key key identifier
|
||||||
|
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||||
|
- `kek_key_version` (String) The encryption-key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info"></a>
|
||||||
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info--write"></a>
|
||||||
|
### Nested Schema for `connection_info.write`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance.
|
||||||
|
- `port` (Number) The port of the instance.
|
||||||
59
docs/resources/postgresflexalpha_user.md
Normal file
59
docs/resources/postgresflexalpha_user.md
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
user_id = "user.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance.
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
63
docs/resources/sqlserverflexalpha_database.md
Normal file
63
docs/resources/sqlserverflexalpha_database.md
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
collation = ""
|
||||||
|
compatibility = "160"
|
||||||
|
name = ""
|
||||||
|
owner = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import a existing sqlserverflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
database_id = "database.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
103
docs/resources/sqlserverflexalpha_instance.md
Normal file
103
docs/resources/sqlserverflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,103 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "00 00 * * *"
|
||||||
|
flavor = {
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
}
|
||||||
|
storage = {
|
||||||
|
class = "class"
|
||||||
|
size = 5
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `status` (String)
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
53
docs/resources/sqlserverflexalpha_user.md
Normal file
53
docs/resources/sqlserverflexalpha_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
51
docs/resources/sqlserverflexbeta_database.md
Normal file
51
docs/resources/sqlserverflexbeta_database.md
Normal file
|
|
@ -0,0 +1,51 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_database" "example" {
  project_id  = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
  instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
  name        = "example-database"
  owner       = "username"
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
158
docs/resources/sqlserverflexbeta_instance.md
Normal file
158
docs/resources/sqlserverflexbeta_instance.md
Normal file
|
|
@ -0,0 +1,158 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
# without encryption and SNA
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# without encryption and PUBLIC
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# with encryption and SNA
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = "service_account@email"
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# import with identity
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
instance_id = var.sql_instance_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `status` (String)
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
53
docs/resources/sqlserverflexbeta_user.md
Normal file
53
docs/resources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
346
generator/cmd/build/build.go
Normal file
346
generator/cmd/build/build.go
Normal file
|
|
@ -0,0 +1,346 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Builder drives the terraform provider code-generation pipeline (see Build).
// The zero value is usable; rootDir is resolved lazily by determineRoot.
type Builder struct {
	rootDir string // repository root, populated by determineRoot via git

	SkipClone   bool // skip cloning the OAS specifications repository
	SkipCleanup bool // keep temporary/generated directories after the run
	PackagesOnly bool // only generate pkg_gen; skips command checks and other steps
	Verbose     bool // emit extra progress logging
	Debug       bool // NOTE(review): unused in the visible pipeline steps — confirm purpose
}
|
||||||
|
|
||||||
|
// Build runs the generation pipeline: it determines the repository root via
// git, verifies required external commands are available (unless only
// packages are generated), and processes the OpenAPI specs found under
// service_specs through oasHandler. The first error encountered is returned;
// nil on success.
func (b *Builder) Build() error {
	slog.Info("Starting Builder")
	if b.PackagesOnly {
		slog.Info(" >>> only generating pkg_gen <<<")
	}

	// Everything below works with paths relative to the repository root.
	rootErr := b.determineRoot()
	if rootErr != nil {
		return rootErr
	}

	if !b.PackagesOnly {
		if b.Verbose {
			slog.Info(" ... Checking needed commands available")
		}
		// NOTE(review): the command list is empty, so this check is currently
		// a no-op — presumably a placeholder until required tools are listed.
		chkErr := checkCommands([]string{})
		if chkErr != nil {
			return chkErr
		}
	}

	// --- disabled pipeline steps kept for reference ---
	// if !b.SkipCleanup {
	//	slog.Info("Cleaning up old packages directory")
	//	err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
	//	if err != nil {
	//		return err
	//	}
	// }
	//
	// if !b.SkipCleanup && !b.PackagesOnly {
	//	slog.Info("Cleaning up old packages directory")
	//	err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
	//	if err != nil {
	//		return err
	//	}
	// }

	// slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
	// genDir := path.Join(*root, GEN_REPO_NAME)
	// if !b.SkipClone {
	//	err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
	//	if err != nil {
	//		return err
	//	}
	// }

	oasHandlerErr := b.oasHandler(path.Join(b.rootDir, "service_specs"))
	if oasHandlerErr != nil {
		return oasHandlerErr
	}

	// if !b.PackagesOnly {
	//	slog.Info("Generating service boilerplate")
	//	err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
	//	if err != nil {
	//		return err
	//	}
	//
	//	slog.Info("Copying all service files")
	//	err = CopyDirectory(
	//		path.Join(*root, "generated", "internal", "services"),
	//		path.Join(*root, "stackit", "internal", "services"),
	//	)
	//	if err != nil {
	//		return err
	//	}
	//
	//	err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
	//	if err != nil {
	//		return err
	//	}
	// }

	// Workaround to silence the linter: this branch exists only so that the
	// flags and createBoilerplate are referenced while the real call sites
	// above are disabled.
	if b.PackagesOnly && b.Verbose && b.SkipClone && b.SkipCleanup {
		bpErr := createBoilerplate(b.rootDir, "boilerplate")
		if bpErr != nil {
			return bpErr
		}
	}

	slog.Info("Done")
	return nil
}
|
||||||
|
|
||||||
|
// templateData is the data handed to the scaffold templates
// (data_source_scaffold.gotmpl, resource_scaffold.gotmpl,
// functions_scaffold.gotmpl) by createBoilerplate.
type templateData struct {
	PackageName       string   // service package name as it appears on disk
	PackageNameCamel  string   // service name in camelCase
	PackageNamePascal string   // service name in PascalCase
	NameCamel         string   // resource name in camelCase
	NamePascal        string   // resource name in PascalCase
	NameSnake         string   // resource name in snake_case (directory name)
	Fields            []string // model field names extracted via getTokens
}
|
||||||
|
|
||||||
|
func createBoilerplate(rootFolder, folder string) error {
|
||||||
|
services, err := os.ReadDir(folder)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, svc := range services {
|
||||||
|
if !svc.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
resources, err := os.ReadDir(path.Join(folder, svc.Name()))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var handleDS bool
|
||||||
|
var handleRes bool
|
||||||
|
var foundDS bool
|
||||||
|
var foundRes bool
|
||||||
|
|
||||||
|
for _, res := range resources {
|
||||||
|
if !res.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
resourceName := res.Name()
|
||||||
|
|
||||||
|
dsFile := path.Join(
|
||||||
|
folder,
|
||||||
|
svc.Name(),
|
||||||
|
res.Name(),
|
||||||
|
"datasources_gen",
|
||||||
|
fmt.Sprintf("%s_data_source_gen.go", res.Name()),
|
||||||
|
)
|
||||||
|
handleDS = FileExists(dsFile)
|
||||||
|
|
||||||
|
resFile := path.Join(
|
||||||
|
folder,
|
||||||
|
svc.Name(),
|
||||||
|
res.Name(),
|
||||||
|
"resources_gen",
|
||||||
|
fmt.Sprintf("%s_resource_gen.go", res.Name()),
|
||||||
|
)
|
||||||
|
handleRes = FileExists(resFile)
|
||||||
|
|
||||||
|
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
|
||||||
|
foundDS = FileExists(dsGoFile)
|
||||||
|
|
||||||
|
resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
|
||||||
|
foundRes = FileExists(resGoFile)
|
||||||
|
|
||||||
|
if handleDS && !foundDS {
|
||||||
|
slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
|
||||||
|
if !ValidateSnakeCase(resourceName) {
|
||||||
|
return errors.New("resource name is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
fields, tokenErr := getTokens(dsFile)
|
||||||
|
if tokenErr != nil {
|
||||||
|
return fmt.Errorf("error reading tokens: %w", tokenErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
tplName := "data_source_scaffold.gotmpl"
|
||||||
|
err = writeTemplateToFile(
|
||||||
|
tplName,
|
||||||
|
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
|
||||||
|
dsGoFile,
|
||||||
|
&templateData{
|
||||||
|
PackageName: svc.Name(),
|
||||||
|
PackageNameCamel: ToCamelCase(svc.Name()),
|
||||||
|
PackageNamePascal: ToPascalCase(svc.Name()),
|
||||||
|
NameCamel: ToCamelCase(resourceName),
|
||||||
|
NamePascal: ToPascalCase(resourceName),
|
||||||
|
NameSnake: resourceName,
|
||||||
|
Fields: fields,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if handleRes && !foundRes {
|
||||||
|
slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
|
||||||
|
if !ValidateSnakeCase(resourceName) {
|
||||||
|
return errors.New("resource name is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
fields, tokenErr := getTokens(resFile)
|
||||||
|
if tokenErr != nil {
|
||||||
|
return fmt.Errorf("error reading tokens: %w", tokenErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
tplName := "resource_scaffold.gotmpl"
|
||||||
|
err = writeTemplateToFile(
|
||||||
|
tplName,
|
||||||
|
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
|
||||||
|
resGoFile,
|
||||||
|
&templateData{
|
||||||
|
PackageName: svc.Name(),
|
||||||
|
PackageNameCamel: ToCamelCase(svc.Name()),
|
||||||
|
PackageNamePascal: ToPascalCase(svc.Name()),
|
||||||
|
NameCamel: ToCamelCase(resourceName),
|
||||||
|
NamePascal: ToPascalCase(resourceName),
|
||||||
|
NameSnake: resourceName,
|
||||||
|
Fields: fields,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !FileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
|
||||||
|
slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
|
||||||
|
if !ValidateSnakeCase(resourceName) {
|
||||||
|
return errors.New("resource name is invalid")
|
||||||
|
}
|
||||||
|
fncTplName := "functions_scaffold.gotmpl"
|
||||||
|
err = writeTemplateToFile(
|
||||||
|
fncTplName,
|
||||||
|
path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
|
||||||
|
path.Join(folder, svc.Name(), res.Name(), "functions.go"),
|
||||||
|
&templateData{
|
||||||
|
PackageName: svc.Name(),
|
||||||
|
PackageNameCamel: ToCamelCase(svc.Name()),
|
||||||
|
PackageNamePascal: ToPascalCase(svc.Name()),
|
||||||
|
NameCamel: ToCamelCase(resourceName),
|
||||||
|
NamePascal: ToPascalCase(resourceName),
|
||||||
|
NameSnake: resourceName,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compiled once at package scope: handleLine is called per generated line,
// and recompiling both patterns on every call is pure overhead.
var (
	// Matches the generated schema attribute entry for the key "id",
	// e.g. `"id": schema.StringAttribute{`.
	schemaIDRegex = regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
	// Matches the generated model struct field tagged with `tfsdk:"id"`.
	modelIDRegex = regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)
)

// handleLine rewrites the reserved attribute name "id" in a generated schema
// or model line to "tf_original_api_id", leaving surrounding text intact.
// Lines matching neither pattern are returned unchanged. The error result is
// always nil today; it is kept for interface stability.
func handleLine(line string) (string, error) {
	if m := schemaIDRegex.FindStringSubmatch(line); m != nil {
		return m[1] + "tf_original_api_id" + m[3], nil
	}

	if m := modelIDRegex.FindStringSubmatch(line); m != nil {
		return m[1] + "tf_original_api_id" + m[3], nil
	}

	return line, nil
}
|
||||||
|
|
||||||
|
func (b *Builder) determineRoot() error {
|
||||||
|
cmd := exec.Command("git", "rev-parse", "--show-toplevel")
|
||||||
|
out, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
lines := strings.Split(string(out), "\n")
|
||||||
|
if lines[0] == "" {
|
||||||
|
return fmt.Errorf("unable to determine root directory from git")
|
||||||
|
}
|
||||||
|
b.rootDir = lines[0]
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(" ... using root", "dir", b.rootDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
|
||||||
|
// if !skipClone {
|
||||||
|
// if FileExists(targetDir) {
|
||||||
|
// remErr := os.RemoveAll(targetDir)
|
||||||
|
// if remErr != nil {
|
||||||
|
// return remErr
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// _, cloneErr := git.Clone(
|
||||||
|
// clone.Repository(repoUrl),
|
||||||
|
// clone.Directory(targetDir),
|
||||||
|
// )
|
||||||
|
// if cloneErr != nil {
|
||||||
|
// return cloneErr
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// return nil
|
||||||
|
//}
|
||||||
|
|
||||||
|
func getTokens(fileName string) ([]string, error) {
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
|
||||||
|
var result []string
|
||||||
|
|
||||||
|
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
ast.Inspect(
|
||||||
|
node, func(n ast.Node) bool {
|
||||||
|
// Suche nach Typ-Deklarationen (structs)
|
||||||
|
ts, ok := n.(*ast.TypeSpec)
|
||||||
|
if ok {
|
||||||
|
if strings.Contains(ts.Name.Name, "Model") {
|
||||||
|
ast.Inspect(
|
||||||
|
ts, func(sn ast.Node) bool {
|
||||||
|
tts, tok := sn.(*ast.Field)
|
||||||
|
if tok {
|
||||||
|
result = append(result, tts.Names[0].String())
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
120
generator/cmd/build/functions.go
Normal file
120
generator/cmd/build/functions.go
Normal file
|
|
@ -0,0 +1,120 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FileExists reports whether pathValue exists (file or directory).
//
// The previous implementation panicked on any os.Stat error other than
// "not exist" (e.g. permission denied); a helper like this must not panic —
// an inaccessible path is treated as not existing, and any real failure
// surfaces later when the caller tries to create or read the path.
func FileExists(pathValue string) bool {
	_, err := os.Stat(pathValue)
	return err == nil
}
|
||||||
|
|
||||||
|
// ucfirst returns s with its first byte upper-cased (byte-based on purpose:
// template names here are ASCII). The empty string is returned as-is.
func ucfirst(s string) string {
	if len(s) == 0 {
		return s
	}
	head, tail := s[:1], s[1:]
	return strings.ToUpper(head) + tail
}
|
||||||
|
|
||||||
|
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
|
||||||
|
fn := template.FuncMap{
|
||||||
|
"ucfirst": ucfirst,
|
||||||
|
}
|
||||||
|
|
||||||
|
tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var f *os.File
|
||||||
|
f, err = os.Create(outFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tmpl.Execute(f, *data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = f.Close()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
/* saved for later
|
||||||
|
func deleteFiles(fNames ...string) error {
|
||||||
|
for _, fName := range fNames {
|
||||||
|
if _, err := os.Stat(fName); !os.IsNotExist(err) {
|
||||||
|
err = os.Remove(fName)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyFile(src, dst string) (int64, error) {
|
||||||
|
sourceFileStat, err := os.Stat(src)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !sourceFileStat.Mode().IsRegular() {
|
||||||
|
return 0, fmt.Errorf("%s is not a regular file", src)
|
||||||
|
}
|
||||||
|
|
||||||
|
source, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
defer func(source *os.File) {
|
||||||
|
err := source.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("copyFile", "err", err)
|
||||||
|
}
|
||||||
|
}(source)
|
||||||
|
|
||||||
|
destination, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
defer func(destination *os.File) {
|
||||||
|
err := destination.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("copyFile", "err", err)
|
||||||
|
}
|
||||||
|
}(destination)
|
||||||
|
nBytes, err := io.Copy(destination, source)
|
||||||
|
return nBytes, err
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
func checkCommands(commands []string) error {
|
||||||
|
for _, commandName := range commands {
|
||||||
|
if !commandExists(commandName) {
|
||||||
|
return fmt.Errorf("missing command %s", commandName)
|
||||||
|
}
|
||||||
|
slog.Info(" found", "command", commandName)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// commandExists reports whether cmd can be resolved to an executable on PATH.
func commandExists(cmd string) bool {
	if _, err := exec.LookPath(cmd); err != nil {
		return false
	}
	return true
}
|
||||||
446
generator/cmd/build/oas-handler.go
Normal file
446
generator/cmd/build/oas-handler.go
Normal file
|
|
@ -0,0 +1,446 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
|
||||||
|
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
|
||||||
|
"github.com/ldez/go-git-cmd-wrapper/v2/git"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
OasRepoName = "stackit-api-specifications"
|
||||||
|
OasRepo = "https://github.com/stackitcloud/stackit-api-specifications.git"
|
||||||
|
|
||||||
|
ResTypeResource = "resources"
|
||||||
|
ResTypeDataSource = "datasources"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Data struct {
|
||||||
|
ServiceName string `yaml:",omitempty" json:",omitempty"`
|
||||||
|
Versions []Version `yaml:"versions" json:"versions"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Version struct {
|
||||||
|
Name string `yaml:"name" json:"name"`
|
||||||
|
Path string `yaml:"path" json:"path"`
|
||||||
|
}
|
||||||
|
|
||||||
|
var oasTempDir string
|
||||||
|
|
||||||
|
func (b *Builder) oasHandler(specDir string) error {
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info("creating schema files", "dir", specDir)
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(specDir); os.IsNotExist(err) {
|
||||||
|
return fmt.Errorf("spec files directory does not exist")
|
||||||
|
}
|
||||||
|
|
||||||
|
err := b.createRepoDir(b.SkipClone)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("%s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
err2 := b.handleServices(specDir)
|
||||||
|
if err2 != nil {
|
||||||
|
return err2
|
||||||
|
}
|
||||||
|
|
||||||
|
if !b.SkipCleanup {
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info("Finally removing temporary files and directories")
|
||||||
|
}
|
||||||
|
err := os.RemoveAll(path.Join(b.rootDir, "generated"))
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.RemoveAll(oasTempDir)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) handleServices(specDir string) error {
|
||||||
|
services, err := os.ReadDir(specDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, svc := range services {
|
||||||
|
if !svc.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(" ... found", "service", svc.Name())
|
||||||
|
}
|
||||||
|
var svcVersions Data
|
||||||
|
svcVersions.ServiceName = svc.Name()
|
||||||
|
|
||||||
|
versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
|
||||||
|
if versionsErr != nil {
|
||||||
|
return versionsErr
|
||||||
|
}
|
||||||
|
|
||||||
|
oasSpecErr := b.generateServiceFiles(&svcVersions)
|
||||||
|
if oasSpecErr != nil {
|
||||||
|
return oasSpecErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) getServiceVersions(confFile string, data *Data) error {
|
||||||
|
if _, cfgFileErr := os.Stat(confFile); os.IsNotExist(cfgFileErr) {
|
||||||
|
return fmt.Errorf("config file does not exist")
|
||||||
|
}
|
||||||
|
|
||||||
|
fileContent, fileErr := os.ReadFile(confFile)
|
||||||
|
if fileErr != nil {
|
||||||
|
return fileErr
|
||||||
|
}
|
||||||
|
convErr := yaml.Unmarshal(fileContent, &data)
|
||||||
|
if convErr != nil {
|
||||||
|
return convErr
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) createRepoDir(skipClone bool) error {
|
||||||
|
tmpDirName, err := os.MkdirTemp("", "oasbuild")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
oasTempDir = path.Join(tmpDirName, OasRepoName)
|
||||||
|
slog.Info("Creating oas repo dir", "dir", oasTempDir)
|
||||||
|
if !skipClone {
|
||||||
|
if FileExists(oasTempDir) {
|
||||||
|
slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out, cloneErr := git.Clone(
|
||||||
|
clone.Repository(OasRepo),
|
||||||
|
clone.Directory(oasTempDir),
|
||||||
|
)
|
||||||
|
if cloneErr != nil {
|
||||||
|
slog.Error("git clone error", "output", out)
|
||||||
|
return cloneErr
|
||||||
|
}
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info("git clone result", "output", out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) generateServiceFiles(data *Data) error {
|
||||||
|
err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, v := range data.Versions {
|
||||||
|
specFiles, specsErr := os.ReadDir(path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name))
|
||||||
|
if specsErr != nil {
|
||||||
|
return specsErr
|
||||||
|
}
|
||||||
|
for _, specFile := range specFiles {
|
||||||
|
if specFile.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
||||||
|
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
||||||
|
if matches == nil {
|
||||||
|
slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
srcSpecFile := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name, specFile.Name())
|
||||||
|
|
||||||
|
if matches[0][0] != specFile.Name() {
|
||||||
|
return fmt.Errorf("matched filename differs from original filename - this should not happen")
|
||||||
|
}
|
||||||
|
resource := matches[0][1]
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(
|
||||||
|
" found service spec",
|
||||||
|
"service",
|
||||||
|
data.ServiceName,
|
||||||
|
"resource",
|
||||||
|
resource,
|
||||||
|
"file",
|
||||||
|
specFile.Name(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
oasFile := path.Join(
|
||||||
|
oasTempDir,
|
||||||
|
"services",
|
||||||
|
data.ServiceName,
|
||||||
|
v.Path,
|
||||||
|
fmt.Sprintf("%s.json", data.ServiceName),
|
||||||
|
)
|
||||||
|
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
|
||||||
|
slog.Warn(
|
||||||
|
" could not find matching oas",
|
||||||
|
"svc",
|
||||||
|
data.ServiceName,
|
||||||
|
"version",
|
||||||
|
v.Name,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// determine correct target service name
|
||||||
|
scName := fmt.Sprintf("%s%s", data.ServiceName, v.Name)
|
||||||
|
scName = strings.ReplaceAll(scName, "-", "")
|
||||||
|
|
||||||
|
specJSONFile := path.Join(
|
||||||
|
b.rootDir,
|
||||||
|
"generated",
|
||||||
|
"specs",
|
||||||
|
fmt.Sprintf("%s_%s_spec.json", scName, resource),
|
||||||
|
)
|
||||||
|
|
||||||
|
cmdErr := b.runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile)
|
||||||
|
if cmdErr != nil {
|
||||||
|
return cmdErr
|
||||||
|
}
|
||||||
|
|
||||||
|
cmdResGenErr := b.runTerraformPluginGenFramework(ResTypeResource, scName, resource, specJSONFile)
|
||||||
|
if cmdResGenErr != nil {
|
||||||
|
return cmdResGenErr
|
||||||
|
}
|
||||||
|
|
||||||
|
cmdDsGenErr := b.runTerraformPluginGenFramework(ResTypeDataSource, scName, resource, specJSONFile)
|
||||||
|
if cmdDsGenErr != nil {
|
||||||
|
return cmdDsGenErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) runTerraformPluginGenFramework(resType, svcName, resource, specJSONFile string) error {
|
||||||
|
var stdOut, stdErr bytes.Buffer
|
||||||
|
tgtFolder := path.Join(
|
||||||
|
b.rootDir,
|
||||||
|
"stackit",
|
||||||
|
"internal",
|
||||||
|
"services",
|
||||||
|
svcName,
|
||||||
|
resource,
|
||||||
|
fmt.Sprintf("%s_gen", resType),
|
||||||
|
)
|
||||||
|
|
||||||
|
//nolint:gosec // this file is not sensitive, so we can use 0755
|
||||||
|
err := os.MkdirAll(tgtFolder, 0o755)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var subCmd string
|
||||||
|
switch resType {
|
||||||
|
case ResTypeResource:
|
||||||
|
subCmd = "resources"
|
||||||
|
case ResTypeDataSource:
|
||||||
|
subCmd = "data-sources"
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unknown resource type given: %s", resType)
|
||||||
|
}
|
||||||
|
|
||||||
|
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||||
|
cmd := exec.Command(
|
||||||
|
"tfplugingen-framework",
|
||||||
|
"generate",
|
||||||
|
subCmd,
|
||||||
|
"--input",
|
||||||
|
specJSONFile,
|
||||||
|
"--output",
|
||||||
|
tgtFolder,
|
||||||
|
"--package",
|
||||||
|
svcName,
|
||||||
|
)
|
||||||
|
|
||||||
|
cmd.Stdout = &stdOut
|
||||||
|
cmd.Stderr = &stdErr
|
||||||
|
if err = cmd.Start(); err != nil {
|
||||||
|
slog.Error(fmt.Sprintf("tfplugingen-framework generate %s", resType), "error", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = cmd.Wait(); err != nil {
|
||||||
|
var exitErr *exec.ExitError
|
||||||
|
if errors.As(err, &exitErr) {
|
||||||
|
slog.Error(
|
||||||
|
fmt.Sprintf("tfplugingen-framework generate %s", resType),
|
||||||
|
"code",
|
||||||
|
exitErr.ExitCode(),
|
||||||
|
"error",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
slog.Error(
|
||||||
|
fmt.Sprintf("tfplugingen-framework generate %s", resType),
|
||||||
|
"err",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if resType == ResTypeDataSource {
|
||||||
|
tfAnoErr := b.handleTfTagForDatasourceFile(
|
||||||
|
path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
|
||||||
|
svcName,
|
||||||
|
resource,
|
||||||
|
)
|
||||||
|
if tfAnoErr != nil {
|
||||||
|
return tfAnoErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) runTerraformPluginGenOpenAPI(srcSpecFile, specJSONFile, oasFile string) error {
|
||||||
|
var stdOut, stdErr bytes.Buffer
|
||||||
|
|
||||||
|
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||||
|
cmd := exec.Command(
|
||||||
|
"tfplugingen-openapi",
|
||||||
|
"generate",
|
||||||
|
"--config",
|
||||||
|
srcSpecFile,
|
||||||
|
"--output",
|
||||||
|
specJSONFile,
|
||||||
|
oasFile,
|
||||||
|
)
|
||||||
|
cmd.Stdout = &stdOut
|
||||||
|
cmd.Stderr = &stdErr
|
||||||
|
|
||||||
|
if err := cmd.Start(); err != nil {
|
||||||
|
slog.Error(
|
||||||
|
"tfplugingen-openapi generate",
|
||||||
|
"error",
|
||||||
|
err,
|
||||||
|
"stdOut",
|
||||||
|
stdOut.String(),
|
||||||
|
"stdErr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := cmd.Wait(); err != nil {
|
||||||
|
var exitErr *exec.ExitError
|
||||||
|
if errors.As(err, &exitErr) {
|
||||||
|
slog.Error(
|
||||||
|
"tfplugingen-openapi generate",
|
||||||
|
"code",
|
||||||
|
exitErr.ExitCode(),
|
||||||
|
"error",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
slog.Error(
|
||||||
|
"tfplugingen-openapi generate",
|
||||||
|
"err",
|
||||||
|
err,
|
||||||
|
"stdout",
|
||||||
|
stdOut.String(),
|
||||||
|
"stderr",
|
||||||
|
stdErr.String(),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if stdOut.Len() > 0 {
|
||||||
|
slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id"
|
||||||
|
func (b *Builder) handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
|
||||||
|
}
|
||||||
|
if !FileExists(filePath) {
|
||||||
|
slog.Warn(" could not find file, skipping", "path", filePath)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
f, err := os.Open(filePath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
tmp, err := os.CreateTemp(b.rootDir, "replace-*")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
sc := bufio.NewScanner(f)
|
||||||
|
for sc.Scan() {
|
||||||
|
resLine, err := handleLine(sc.Text())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := tmp.WriteString(resLine + "\n"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if scErr := sc.Err(); scErr != nil {
|
||||||
|
return scErr
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tmp.Close(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := f.Close(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
//nolint:gosec // path traversal is not a concern here
|
||||||
|
if err := os.Rename(tmp.Name(), filePath); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
@ -3,13 +3,15 @@ package cmd
|
||||||
import (
|
import (
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/build"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/build"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
skipCleanup bool
|
skipCleanup bool
|
||||||
skipClone bool
|
skipClone bool
|
||||||
packagesOnly bool
|
packagesOnly bool
|
||||||
|
verbose bool
|
||||||
|
debug bool
|
||||||
)
|
)
|
||||||
|
|
||||||
var buildCmd = &cobra.Command{
|
var buildCmd = &cobra.Command{
|
||||||
|
|
@ -21,6 +23,8 @@ var buildCmd = &cobra.Command{
|
||||||
SkipClone: skipClone,
|
SkipClone: skipClone,
|
||||||
SkipCleanup: skipCleanup,
|
SkipCleanup: skipCleanup,
|
||||||
PackagesOnly: packagesOnly,
|
PackagesOnly: packagesOnly,
|
||||||
|
Verbose: verbose,
|
||||||
|
Debug: debug,
|
||||||
}
|
}
|
||||||
return b.Build()
|
return b.Build()
|
||||||
},
|
},
|
||||||
|
|
@ -32,6 +36,8 @@ func NewBuildCmd() *cobra.Command {
|
||||||
|
|
||||||
func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands
|
func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands
|
||||||
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
|
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
|
||||||
|
buildCmd.Flags().BoolVarP(&debug, "debug", "d", false, "Enable debug output")
|
||||||
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
|
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
|
||||||
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
|
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
|
||||||
|
buildCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "verbose - show more logs")
|
||||||
}
|
}
|
||||||
|
|
@ -11,7 +11,7 @@ import (
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd/publish"
|
publish2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/publish"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
@ -8,7 +8,7 @@ import (
|
||||||
"github.com/SladkyCitron/slogcolor"
|
"github.com/SladkyCitron/slogcolor"
|
||||||
cc "github.com/ivanpirog/coloredcobra"
|
cc "github.com/ivanpirog/coloredcobra"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/cmd/cmd"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
66
go.mod
66
go.mod
|
|
@ -2,20 +2,14 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
|
||||||
|
|
||||||
go 1.25.6
|
go 1.25.6
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/SladkyCitron/slogcolor v1.8.0
|
github.com/SladkyCitron/slogcolor v1.8.0
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
github.com/golang-jwt/jwt/v5 v5.3.1
|
||||||
github.com/golangci/golangci-lint/v2 v2.10.1
|
|
||||||
github.com/google/go-cmp v0.7.0
|
github.com/google/go-cmp v0.7.0
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1
|
github.com/hashicorp/terraform-plugin-framework v1.18.0
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0
|
|
||||||
github.com/hashicorp/terraform-plugin-docs v0.24.0
|
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.17.0
|
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
||||||
github.com/hashicorp/terraform-plugin-go v0.29.0
|
github.com/hashicorp/terraform-plugin-go v0.30.0
|
||||||
github.com/hashicorp/terraform-plugin-log v0.10.0
|
github.com/hashicorp/terraform-plugin-log v0.10.0
|
||||||
github.com/hashicorp/terraform-plugin-testing v1.14.0
|
github.com/hashicorp/terraform-plugin-testing v1.14.0
|
||||||
github.com/iancoleman/strcase v0.3.0
|
github.com/iancoleman/strcase v0.3.0
|
||||||
|
|
@ -24,10 +18,10 @@ require (
|
||||||
github.com/joho/godotenv v1.5.1
|
github.com/joho/godotenv v1.5.1
|
||||||
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
|
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
|
||||||
github.com/spf13/cobra v1.10.2
|
github.com/spf13/cobra v1.10.2
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1
|
github.com/stackitcloud/stackit-sdk-go/core v0.22.0
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
|
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0
|
||||||
github.com/teambition/rrule-go v1.8.2
|
github.com/teambition/rrule-go v1.8.2
|
||||||
golang.org/x/tools v0.42.0
|
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -54,14 +48,14 @@ require (
|
||||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||||
github.com/Masterminds/semver/v3 v3.4.0 // indirect
|
github.com/Masterminds/semver/v3 v3.4.0 // indirect
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
||||||
github.com/MirrexOne/unqueryvet v1.5.3 // indirect
|
github.com/MirrexOne/unqueryvet v1.5.4 // indirect
|
||||||
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
|
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
|
||||||
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
github.com/ProtonMail/go-crypto v1.4.0 // indirect
|
||||||
github.com/agext/levenshtein v1.2.3 // indirect
|
github.com/agext/levenshtein v1.2.3 // indirect
|
||||||
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
|
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
|
||||||
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
|
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
|
||||||
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
|
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
|
||||||
github.com/alexkohler/prealloc v1.0.2 // indirect
|
github.com/alexkohler/prealloc v1.1.0 // indirect
|
||||||
github.com/alfatraining/structtag v1.0.0 // indirect
|
github.com/alfatraining/structtag v1.0.0 // indirect
|
||||||
github.com/alingse/asasalint v0.0.11 // indirect
|
github.com/alingse/asasalint v0.0.11 // indirect
|
||||||
github.com/alingse/nilnesserr v0.2.0 // indirect
|
github.com/alingse/nilnesserr v0.2.0 // indirect
|
||||||
|
|
@ -70,7 +64,6 @@ require (
|
||||||
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
|
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
|
||||||
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
|
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
|
||||||
github.com/beorn7/perks v1.0.1 // indirect
|
github.com/beorn7/perks v1.0.1 // indirect
|
||||||
github.com/bgentry/speakeasy v0.1.0 // indirect
|
github.com/bgentry/speakeasy v0.1.0 // indirect
|
||||||
github.com/bkielbasa/cyclop v1.2.3 // indirect
|
github.com/bkielbasa/cyclop v1.2.3 // indirect
|
||||||
|
|
@ -80,7 +73,6 @@ require (
|
||||||
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
|
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
|
||||||
github.com/breml/bidichk v0.3.3 // indirect
|
github.com/breml/bidichk v0.3.3 // indirect
|
||||||
github.com/breml/errchkjson v0.4.1 // indirect
|
github.com/breml/errchkjson v0.4.1 // indirect
|
||||||
github.com/buger/jsonparser v1.1.1 // indirect
|
|
||||||
github.com/butuzov/ireturn v0.4.0 // indirect
|
github.com/butuzov/ireturn v0.4.0 // indirect
|
||||||
github.com/butuzov/mirror v1.3.0 // indirect
|
github.com/butuzov/mirror v1.3.0 // indirect
|
||||||
github.com/catenacyber/perfsprint v0.10.1 // indirect
|
github.com/catenacyber/perfsprint v0.10.1 // indirect
|
||||||
|
|
@ -100,7 +92,6 @@ require (
|
||||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||||
github.com/denis-tingaikin/go-header v0.5.0 // indirect
|
github.com/denis-tingaikin/go-header v0.5.0 // indirect
|
||||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||||
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect
|
|
||||||
github.com/ettle/strcase v0.2.0 // indirect
|
github.com/ettle/strcase v0.2.0 // indirect
|
||||||
github.com/fatih/color v1.18.0 // indirect
|
github.com/fatih/color v1.18.0 // indirect
|
||||||
github.com/fatih/structtag v1.2.0 // indirect
|
github.com/fatih/structtag v1.2.0 // indirect
|
||||||
|
|
@ -126,6 +117,7 @@ require (
|
||||||
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
|
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
|
||||||
github.com/golangci/go-printf-func-name v0.1.1 // indirect
|
github.com/golangci/go-printf-func-name v0.1.1 // indirect
|
||||||
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
|
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
|
||||||
|
github.com/golangci/golangci-lint/v2 v2.11.2 // indirect
|
||||||
github.com/golangci/golines v0.15.0 // indirect
|
github.com/golangci/golines v0.15.0 // indirect
|
||||||
github.com/golangci/misspell v0.8.0 // indirect
|
github.com/golangci/misspell v0.8.0 // indirect
|
||||||
github.com/golangci/plugin-module-register v0.1.2 // indirect
|
github.com/golangci/plugin-module-register v0.1.2 // indirect
|
||||||
|
|
@ -155,22 +147,23 @@ require (
|
||||||
github.com/hashicorp/logutils v1.0.0 // indirect
|
github.com/hashicorp/logutils v1.0.0 // indirect
|
||||||
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
||||||
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect
|
github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect
|
||||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
|
github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 // indirect
|
||||||
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
||||||
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
|
github.com/hashicorp/terraform-svchost v0.2.1 // indirect
|
||||||
github.com/hashicorp/yamux v0.1.2 // indirect
|
github.com/hashicorp/yamux v0.1.2 // indirect
|
||||||
github.com/hexops/gotextdiff v1.0.3 // indirect
|
github.com/hexops/gotextdiff v1.0.3 // indirect
|
||||||
github.com/huandu/xstrings v1.4.0 // indirect
|
github.com/huandu/xstrings v1.3.3 // indirect
|
||||||
github.com/imdario/mergo v0.3.16 // indirect
|
github.com/imdario/mergo v0.3.15 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jgautheron/goconst v1.8.2 // indirect
|
github.com/jgautheron/goconst v1.8.2 // indirect
|
||||||
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
|
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
|
||||||
github.com/jjti/go-spancheck v0.6.5 // indirect
|
github.com/jjti/go-spancheck v0.6.5 // indirect
|
||||||
github.com/julz/importas v0.2.0 // indirect
|
github.com/julz/importas v0.2.0 // indirect
|
||||||
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
|
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
|
||||||
github.com/kisielk/errcheck v1.9.0 // indirect
|
github.com/kisielk/errcheck v1.10.0 // indirect
|
||||||
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
|
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
|
||||||
|
github.com/kr/text v0.2.0 // indirect
|
||||||
github.com/kulti/thelper v0.7.1 // indirect
|
github.com/kulti/thelper v0.7.1 // indirect
|
||||||
github.com/kunwardeep/paralleltest v1.0.15 // indirect
|
github.com/kunwardeep/paralleltest v1.0.15 // indirect
|
||||||
github.com/lasiar/canonicalheader v1.1.2 // indirect
|
github.com/lasiar/canonicalheader v1.1.2 // indirect
|
||||||
|
|
@ -184,7 +177,6 @@ require (
|
||||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||||
github.com/macabu/inamedparam v0.2.0 // indirect
|
github.com/macabu/inamedparam v0.2.0 // indirect
|
||||||
github.com/magiconair/properties v1.8.6 // indirect
|
github.com/magiconair/properties v1.8.6 // indirect
|
||||||
github.com/mailru/easyjson v0.7.7 // indirect
|
|
||||||
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
|
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
|
||||||
github.com/manuelarte/funcorder v0.5.0 // indirect
|
github.com/manuelarte/funcorder v0.5.0 // indirect
|
||||||
github.com/maratori/testableexamples v1.0.1 // indirect
|
github.com/maratori/testableexamples v1.0.1 // indirect
|
||||||
|
|
@ -194,7 +186,7 @@ require (
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
|
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
|
||||||
github.com/mgechev/revive v1.14.0 // indirect
|
github.com/mgechev/revive v1.15.0 // indirect
|
||||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||||
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
||||||
|
|
@ -208,7 +200,6 @@ require (
|
||||||
github.com/nishanths/predeclared v0.2.2 // indirect
|
github.com/nishanths/predeclared v0.2.2 // indirect
|
||||||
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
|
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
|
||||||
github.com/oklog/run v1.2.0 // indirect
|
github.com/oklog/run v1.2.0 // indirect
|
||||||
github.com/pb33f/libopenapi v0.15.0 // indirect
|
|
||||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
github.com/pelletier/go-toml v1.9.5 // indirect
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||||
|
|
@ -231,14 +222,14 @@ require (
|
||||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
|
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
|
||||||
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
|
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
|
||||||
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
|
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
|
||||||
github.com/securego/gosec/v2 v2.23.0 // indirect
|
github.com/securego/gosec/v2 v2.24.7 // indirect
|
||||||
github.com/shopspring/decimal v1.3.1 // indirect
|
github.com/shopspring/decimal v1.3.1 // indirect
|
||||||
github.com/sirupsen/logrus v1.9.4 // indirect
|
github.com/sirupsen/logrus v1.9.4 // indirect
|
||||||
github.com/sivchari/containedctx v1.0.3 // indirect
|
github.com/sivchari/containedctx v1.0.3 // indirect
|
||||||
github.com/sonatard/noctx v0.4.0 // indirect
|
github.com/sonatard/noctx v0.5.0 // indirect
|
||||||
github.com/sourcegraph/go-diff v0.7.0 // indirect
|
github.com/sourcegraph/go-diff v0.7.0 // indirect
|
||||||
github.com/spf13/afero v1.15.0 // indirect
|
github.com/spf13/afero v1.15.0 // indirect
|
||||||
github.com/spf13/cast v1.5.1 // indirect
|
github.com/spf13/cast v1.5.0 // indirect
|
||||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||||
github.com/spf13/pflag v1.0.10 // indirect
|
github.com/spf13/pflag v1.0.10 // indirect
|
||||||
github.com/spf13/viper v1.12.0 // indirect
|
github.com/spf13/viper v1.12.0 // indirect
|
||||||
|
|
@ -254,16 +245,11 @@ require (
|
||||||
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
|
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
|
||||||
github.com/ultraware/funlen v0.2.0 // indirect
|
github.com/ultraware/funlen v0.2.0 // indirect
|
||||||
github.com/ultraware/whitespace v0.2.0 // indirect
|
github.com/ultraware/whitespace v0.2.0 // indirect
|
||||||
github.com/uudashr/gocognit v1.2.0 // indirect
|
github.com/uudashr/gocognit v1.2.1 // indirect
|
||||||
github.com/uudashr/iface v1.4.1 // indirect
|
github.com/uudashr/iface v1.4.1 // indirect
|
||||||
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
|
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
|
||||||
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
|
||||||
github.com/xen0n/gosmopolitan v1.3.0 // indirect
|
github.com/xen0n/gosmopolitan v1.3.0 // indirect
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
||||||
github.com/yagipy/maintidx v1.0.0 // indirect
|
github.com/yagipy/maintidx v1.0.0 // indirect
|
||||||
|
|
@ -271,7 +257,7 @@ require (
|
||||||
github.com/ykadowak/zerologlint v0.1.5 // indirect
|
github.com/ykadowak/zerologlint v0.1.5 // indirect
|
||||||
github.com/yuin/goldmark v1.7.7 // indirect
|
github.com/yuin/goldmark v1.7.7 // indirect
|
||||||
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
||||||
github.com/zclconf/go-cty v1.17.0 // indirect
|
github.com/zclconf/go-cty v1.18.0 // indirect
|
||||||
gitlab.com/bosi/decorder v0.4.2 // indirect
|
gitlab.com/bosi/decorder v0.4.2 // indirect
|
||||||
go-simpler.org/musttag v0.14.0 // indirect
|
go-simpler.org/musttag v0.14.0 // indirect
|
||||||
go-simpler.org/sloglint v0.11.1 // indirect
|
go-simpler.org/sloglint v0.11.1 // indirect
|
||||||
|
|
@ -285,14 +271,14 @@ require (
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
|
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
|
||||||
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
|
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
|
||||||
golang.org/x/mod v0.33.0 // indirect
|
golang.org/x/mod v0.33.0 // indirect
|
||||||
golang.org/x/net v0.50.0 // indirect
|
golang.org/x/net v0.51.0 // indirect
|
||||||
golang.org/x/sync v0.19.0 // indirect
|
golang.org/x/sync v0.19.0 // indirect
|
||||||
golang.org/x/sys v0.41.0 // indirect
|
golang.org/x/sys v0.41.0 // indirect
|
||||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 // indirect
|
|
||||||
golang.org/x/text v0.34.0 // indirect
|
golang.org/x/text v0.34.0 // indirect
|
||||||
|
golang.org/x/tools v0.42.0 // indirect
|
||||||
google.golang.org/appengine v1.6.8 // indirect
|
google.golang.org/appengine v1.6.8 // indirect
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 // indirect
|
||||||
google.golang.org/grpc v1.79.1 // indirect
|
google.golang.org/grpc v1.79.2 // indirect
|
||||||
google.golang.org/protobuf v1.36.11 // indirect
|
google.golang.org/protobuf v1.36.11 // indirect
|
||||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||||
|
|
|
||||||
201
go.sum
201
go.sum
|
|
@ -60,6 +60,8 @@ github.com/Antonboom/nilnil v1.1.1/go.mod h1:yCyAmSw3doopbOWhJlVci+HuyNRuHJKIv6V
|
||||||
github.com/Antonboom/testifylint v1.6.4 h1:gs9fUEy+egzxkEbq9P4cpcMB6/G0DYdMeiFS87UiqmQ=
|
github.com/Antonboom/testifylint v1.6.4 h1:gs9fUEy+egzxkEbq9P4cpcMB6/G0DYdMeiFS87UiqmQ=
|
||||||
github.com/Antonboom/testifylint v1.6.4/go.mod h1:YO33FROXX2OoUfwjz8g+gUxQXio5i9qpVy7nXGbxDD4=
|
github.com/Antonboom/testifylint v1.6.4/go.mod h1:YO33FROXX2OoUfwjz8g+gUxQXio5i9qpVy7nXGbxDD4=
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
|
||||||
|
github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
||||||
github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk=
|
github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk=
|
||||||
github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||||
|
|
@ -69,6 +71,7 @@ github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+
|
||||||
github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc=
|
github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc=
|
||||||
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
|
github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
|
||||||
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
|
github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
|
||||||
|
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
|
||||||
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
|
||||||
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
|
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
|
||||||
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
|
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
|
||||||
|
|
@ -76,24 +79,20 @@ github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
|
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
|
||||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||||
github.com/MirrexOne/unqueryvet v1.5.3 h1:LpT3rsH+IY3cQddWF9bg4C7jsbASdGnrOSofY8IPEiw=
|
github.com/MirrexOne/unqueryvet v1.5.4 h1:38QOxShO7JmMWT+eCdDMbcUgGCOeJphVkzzRgyLJgsQ=
|
||||||
github.com/MirrexOne/unqueryvet v1.5.3/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
|
github.com/MirrexOne/unqueryvet v1.5.4/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
|
||||||
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
|
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
|
||||||
github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
|
github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
|
||||||
github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
|
github.com/ProtonMail/go-crypto v1.4.0 h1:Zq/pbM3F5DFgJiMouxEdSVY44MVoQNEKp5d5QxIQceQ=
|
||||||
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
|
github.com/ProtonMail/go-crypto v1.4.0/go.mod h1:e1OaTyu5SYVrO9gKOEhTc+5UcXtTUa+P3uLudwcgPqo=
|
||||||
github.com/SladkyCitron/slogcolor v1.8.0 h1:ln4mUPfVhs7a/vZfjnKkz5YZ71Bg/KFWneS2hfFq6FM=
|
github.com/SladkyCitron/slogcolor v1.8.0 h1:ln4mUPfVhs7a/vZfjnKkz5YZ71Bg/KFWneS2hfFq6FM=
|
||||||
github.com/SladkyCitron/slogcolor v1.8.0/go.mod h1:ft8LEVIl4isUkebakhv+ngNXJjWBumnwhXfxTLApf3M=
|
github.com/SladkyCitron/slogcolor v1.8.0/go.mod h1:ft8LEVIl4isUkebakhv+ngNXJjWBumnwhXfxTLApf3M=
|
||||||
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
|
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
|
||||||
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
|
||||||
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
|
|
||||||
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
|
|
||||||
github.com/alecthomas/chroma/v2 v2.23.1 h1:nv2AVZdTyClGbVQkIzlDm/rnhk1E9bU9nXwmZ/Vk/iY=
|
github.com/alecthomas/chroma/v2 v2.23.1 h1:nv2AVZdTyClGbVQkIzlDm/rnhk1E9bU9nXwmZ/Vk/iY=
|
||||||
github.com/alecthomas/chroma/v2 v2.23.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
|
github.com/alecthomas/chroma/v2 v2.23.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
|
||||||
github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU=
|
github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU=
|
||||||
github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E=
|
github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E=
|
||||||
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
|
|
||||||
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
|
|
||||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||||
|
|
@ -101,8 +100,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
|
||||||
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
|
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
|
||||||
github.com/alexkohler/nakedret/v2 v2.0.6 h1:ME3Qef1/KIKr3kWX3nti3hhgNxw6aqN5pZmQiFSsuzQ=
|
github.com/alexkohler/nakedret/v2 v2.0.6 h1:ME3Qef1/KIKr3kWX3nti3hhgNxw6aqN5pZmQiFSsuzQ=
|
||||||
github.com/alexkohler/nakedret/v2 v2.0.6/go.mod h1:l3RKju/IzOMQHmsEvXwkqMDzHHvurNQfAgE1eVmT40Q=
|
github.com/alexkohler/nakedret/v2 v2.0.6/go.mod h1:l3RKju/IzOMQHmsEvXwkqMDzHHvurNQfAgE1eVmT40Q=
|
||||||
github.com/alexkohler/prealloc v1.0.2 h1:MPo8cIkGkZytq7WNH9UHv3DIX1mPz1RatPXnZb0zHWQ=
|
github.com/alexkohler/prealloc v1.1.0 h1:cKGRBqlXw5iyQGLYhrXrDlcHxugXpTq4tQ5c91wkf8M=
|
||||||
github.com/alexkohler/prealloc v1.0.2/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
|
github.com/alexkohler/prealloc v1.1.0/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
|
||||||
github.com/alfatraining/structtag v1.0.0 h1:2qmcUqNcCoyVJ0up879K614L9PazjBSFruTB0GOFjCc=
|
github.com/alfatraining/structtag v1.0.0 h1:2qmcUqNcCoyVJ0up879K614L9PazjBSFruTB0GOFjCc=
|
||||||
github.com/alfatraining/structtag v1.0.0/go.mod h1:p3Xi5SwzTi+Ryj64DqjLWz7XurHxbGsq6y3ubePJPus=
|
github.com/alfatraining/structtag v1.0.0/go.mod h1:p3Xi5SwzTi+Ryj64DqjLWz7XurHxbGsq6y3ubePJPus=
|
||||||
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
|
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
|
||||||
|
|
@ -120,8 +119,6 @@ github.com/ashanbrown/makezero/v2 v2.1.0 h1:snuKYMbqosNokUKm+R6/+vOPs8yVAi46La7C
|
||||||
github.com/ashanbrown/makezero/v2 v2.1.0/go.mod h1:aEGT/9q3S8DHeE57C88z2a6xydvgx8J5hgXIGWgo0MY=
|
github.com/ashanbrown/makezero/v2 v2.1.0/go.mod h1:aEGT/9q3S8DHeE57C88z2a6xydvgx8J5hgXIGWgo0MY=
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||||
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
|
|
||||||
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
|
|
||||||
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
||||||
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
|
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
|
||||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||||
|
|
@ -144,8 +141,6 @@ github.com/breml/errchkjson v0.4.1 h1:keFSS8D7A2T0haP9kzZTi7o26r7kE3vymjZNeNDRDw
|
||||||
github.com/breml/errchkjson v0.4.1/go.mod h1:a23OvR6Qvcl7DG/Z4o0el6BRAjKnaReoPQFciAl9U3s=
|
github.com/breml/errchkjson v0.4.1/go.mod h1:a23OvR6Qvcl7DG/Z4o0el6BRAjKnaReoPQFciAl9U3s=
|
||||||
github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw=
|
github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw=
|
||||||
github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c=
|
github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c=
|
||||||
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
|
|
||||||
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
|
|
||||||
github.com/butuzov/ireturn v0.4.0 h1:+s76bF/PfeKEdbG8b54aCocxXmi0wvYdOVsWxVO7n8E=
|
github.com/butuzov/ireturn v0.4.0 h1:+s76bF/PfeKEdbG8b54aCocxXmi0wvYdOVsWxVO7n8E=
|
||||||
github.com/butuzov/ireturn v0.4.0/go.mod h1:ghI0FrCmap8pDWZwfPisFD1vEc56VKH4NpQUxDHta70=
|
github.com/butuzov/ireturn v0.4.0/go.mod h1:ghI0FrCmap8pDWZwfPisFD1vEc56VKH4NpQUxDHta70=
|
||||||
github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc=
|
github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc=
|
||||||
|
|
@ -182,6 +177,7 @@ github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJ
|
||||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||||
|
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||||
github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs=
|
github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs=
|
||||||
github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88=
|
github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88=
|
||||||
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
||||||
|
|
@ -190,8 +186,6 @@ github.com/daixiang0/gci v0.13.7 h1:+0bG5eK9vlI08J+J/NWGbWPTNiXPG4WhNLJOkSxWITQ=
|
||||||
github.com/daixiang0/gci v0.13.7/go.mod h1:812WVN6JLFY9S6Tv76twqmNqevN0pa3SX3nih0brVzQ=
|
github.com/daixiang0/gci v0.13.7/go.mod h1:812WVN6JLFY9S6Tv76twqmNqevN0pa3SX3nih0brVzQ=
|
||||||
github.com/dave/dst v0.27.3 h1:P1HPoMza3cMEquVf9kKy8yXsFirry4zEnWOdYPOoIzY=
|
github.com/dave/dst v0.27.3 h1:P1HPoMza3cMEquVf9kKy8yXsFirry4zEnWOdYPOoIzY=
|
||||||
github.com/dave/dst v0.27.3/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc=
|
github.com/dave/dst v0.27.3/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc=
|
||||||
github.com/dave/jennifer v1.7.1 h1:B4jJJDHelWcDhlRQxWeo0Npa/pYKBLrirAQoTN45txo=
|
|
||||||
github.com/dave/jennifer v1.7.1/go.mod h1:nXbxhEmQfOZhWml3D1cDK5M1FLnMSozpbFN/m3RmGZc=
|
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||||
|
|
@ -200,9 +194,6 @@ github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42
|
||||||
github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY=
|
github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY=
|
||||||
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
|
github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZQ=
|
||||||
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||||
github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58=
|
|
||||||
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 h1:PRxIJD8XjimM5aTknUK9w6DHLDox2r2M3DI4i2pnd3w=
|
|
||||||
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936/go.mod h1:ttYvX5qlB+mlV1okblJqcSMtR4c52UKxDiX9GRBS8+Q=
|
|
||||||
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||||
|
|
@ -218,10 +209,6 @@ github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4
|
||||||
github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
|
github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
|
||||||
github.com/firefart/nonamedreturns v1.0.6 h1:vmiBcKV/3EqKY3ZiPxCINmpS431OcE1S47AQUwhrg8E=
|
github.com/firefart/nonamedreturns v1.0.6 h1:vmiBcKV/3EqKY3ZiPxCINmpS431OcE1S47AQUwhrg8E=
|
||||||
github.com/firefart/nonamedreturns v1.0.6/go.mod h1:R8NisJnSIpvPWheCq0mNRXJok6D8h7fagJTF8EMEwCo=
|
github.com/firefart/nonamedreturns v1.0.6/go.mod h1:R8NisJnSIpvPWheCq0mNRXJok6D8h7fagJTF8EMEwCo=
|
||||||
github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY=
|
|
||||||
github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
|
||||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
|
||||||
github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI=
|
github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI=
|
||||||
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
|
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
|
||||||
github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo=
|
github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo=
|
||||||
|
|
@ -249,13 +236,7 @@ github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||||
github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
|
|
||||||
github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
|
|
||||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||||
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=
|
|
||||||
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
|
|
||||||
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
|
|
||||||
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
|
|
||||||
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
|
||||||
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||||
github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8=
|
github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8=
|
||||||
|
|
@ -270,8 +251,6 @@ github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsO
|
||||||
github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4=
|
github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4=
|
||||||
github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA=
|
github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA=
|
||||||
github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA=
|
github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA=
|
||||||
github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk=
|
|
||||||
github.com/go-toolsmith/pkgload v1.2.2/go.mod h1:R2hxLNRKuAsiXCo2i5J6ZQPhnPMOVtU+f0arbFPWCus=
|
|
||||||
github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
|
github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
|
||||||
github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw=
|
github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw=
|
||||||
github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ=
|
github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ=
|
||||||
|
|
@ -330,8 +309,8 @@ github.com/golangci/go-printf-func-name v0.1.1 h1:hIYTFJqAGp1iwoIfsNTpoq1xZAarog
|
||||||
github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss=
|
github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss=
|
||||||
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
|
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
|
||||||
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
|
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
|
||||||
github.com/golangci/golangci-lint/v2 v2.10.1 h1:flhw5Px6ojbLyEFzXvJn5B2HEdkkRlkhE1SnmCbQBiE=
|
github.com/golangci/golangci-lint/v2 v2.11.2 h1:4Icd3mEqthcFcFww8L67OBtfKB/obXxko8aFUMqP/5w=
|
||||||
github.com/golangci/golangci-lint/v2 v2.10.1/go.mod h1:dBsrOk6zj0vDhlTv+IiJGqkDokR24IVTS7W3EVfPTQY=
|
github.com/golangci/golangci-lint/v2 v2.11.2/go.mod h1:wexdFBIQNhHNhDe1oqzlGFE5dYUqlfccWJKWjoWF1GI=
|
||||||
github.com/golangci/golines v0.15.0 h1:Qnph25g8Y1c5fdo1X7GaRDGgnMHgnxh4Gk4VfPTtRx0=
|
github.com/golangci/golines v0.15.0 h1:Qnph25g8Y1c5fdo1X7GaRDGgnMHgnxh4Gk4VfPTtRx0=
|
||||||
github.com/golangci/golines v0.15.0/go.mod h1:AZjXd23tbHMpowhtnGlj9KCNsysj72aeZVVHnVcZx10=
|
github.com/golangci/golines v0.15.0/go.mod h1:AZjXd23tbHMpowhtnGlj9KCNsysj72aeZVVHnVcZx10=
|
||||||
github.com/golangci/misspell v0.8.0 h1:qvxQhiE2/5z+BVRo1kwYA8yGz+lOlu5Jfvtx2b04Jbg=
|
github.com/golangci/misspell v0.8.0 h1:qvxQhiE2/5z+BVRo1kwYA8yGz+lOlu5Jfvtx2b04Jbg=
|
||||||
|
|
@ -369,9 +348,6 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf
|
||||||
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||||
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||||
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||||
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
|
||||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 h1:z2ogiKUYzX5Is6zr/vP9vJGqPwcdqsWjOt+V8J7+bTc=
|
|
||||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
|
|
||||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||||
|
|
@ -390,8 +366,6 @@ github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXS
|
||||||
github.com/gostaticanalysis/nilerr v0.1.2 h1:S6nk8a9N8g062nsx63kUkF6AzbHGw7zzyHMcpu52xQU=
|
github.com/gostaticanalysis/nilerr v0.1.2 h1:S6nk8a9N8g062nsx63kUkF6AzbHGw7zzyHMcpu52xQU=
|
||||||
github.com/gostaticanalysis/nilerr v0.1.2/go.mod h1:A19UHhoY3y8ahoL7YKz6sdjDtduwTSI4CsymaC2htPA=
|
github.com/gostaticanalysis/nilerr v0.1.2/go.mod h1:A19UHhoY3y8ahoL7YKz6sdjDtduwTSI4CsymaC2htPA=
|
||||||
github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M=
|
github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M=
|
||||||
github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8=
|
|
||||||
github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs=
|
|
||||||
github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU=
|
github.com/hashicorp/cli v1.1.7 h1:/fZJ+hNdwfTSfsxMBa9WWMlfjUZbX8/LnUxgAd7lCVU=
|
||||||
github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU=
|
github.com/hashicorp/cli v1.1.7/go.mod h1:e6Mfpga9OCT1vqzFuoGZiiF/KaG9CbUfO5s3ghU3YgU=
|
||||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||||
|
|
@ -437,45 +411,36 @@ github.com/hashicorp/terraform-exec v0.25.0 h1:Bkt6m3VkJqYh+laFMrWIpy9KHYFITpOyz
|
||||||
github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8=
|
github.com/hashicorp/terraform-exec v0.25.0/go.mod h1:dl9IwsCfklDU6I4wq9/StFDp7dNbH/h5AnfS1RmiUl8=
|
||||||
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
|
github.com/hashicorp/terraform-json v0.27.2 h1:BwGuzM6iUPqf9JYM/Z4AF1OJ5VVJEEzoKST/tRDBJKU=
|
||||||
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
|
github.com/hashicorp/terraform-json v0.27.2/go.mod h1:GzPLJ1PLdUG5xL6xn1OXWIjteQRT2CNT9o/6A9mi9hE=
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1 h1:eaI/3dsu2T5QAXbA+7N+B+UBj20GdtYnsRuYypKh3S4=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1/go.mod h1:kpYM23L7NtcfaQdWAN0QFkV/lU0w16qJ2ddAPCI4zAg=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0 h1:IKpc337XKk50QyQPSxLrHwdqSo1E2XqCMxFkWsZcTvc=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0/go.mod h1:tT6wl80h7nsMBw+1yZRgJXi+Ys85PUai11weDqysvp4=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 h1:91dQG1A/DxP6vRz9GiytDTrZTXDbhHPvmpYnAyWA/Vw=
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0/go.mod h1:fywrEKpordQypmAjz/HIfm2LuNVmyJ6KDe8XT9GdJxQ=
|
|
||||||
github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU=
|
github.com/hashicorp/terraform-plugin-docs v0.24.0 h1:YNZYd+8cpYclQyXbl1EEngbld8w7/LPOm99GD5nikIU=
|
||||||
github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o=
|
github.com/hashicorp/terraform-plugin-docs v0.24.0/go.mod h1:YLg+7LEwVmRuJc0EuCw0SPLxuQXw5mW8iJ5ml/kvi+o=
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.17.0 h1:JdX50CFrYcYFY31gkmitAEAzLKoBgsK+iaJjDC8OexY=
|
github.com/hashicorp/terraform-plugin-framework v1.18.0 h1:Xy6OfqSTZfAAKXSlJ810lYvuQvYkOpSUoNMQ9l2L1RA=
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.17.0/go.mod h1:4OUXKdHNosX+ys6rLgVlgklfxN3WHR5VHSOABeS/BM0=
|
github.com/hashicorp/terraform-plugin-framework v1.18.0/go.mod h1:eeFIf68PME+kenJeqSrIcpHhYQK0TOyv7ocKdN4Z35E=
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0 h1:Zz3iGgzxe/1XBkooZCewS0nJAaCFPFPHdNJd8FgE4Ow=
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0/go.mod h1:GBKTNGbGVJohU03dZ7U8wHqc2zYnMUawgCN+gC0itLc=
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0/go.mod h1:GBKTNGbGVJohU03dZ7U8wHqc2zYnMUawgCN+gC0itLc=
|
||||||
github.com/hashicorp/terraform-plugin-go v0.29.0 h1:1nXKl/nSpaYIUBU1IG/EsDOX0vv+9JxAltQyDMpq5mU=
|
github.com/hashicorp/terraform-plugin-go v0.30.0 h1:VmEiD0n/ewxbvV5VI/bYwNtlSEAXtHaZlSnyUUuQK6k=
|
||||||
github.com/hashicorp/terraform-plugin-go v0.29.0/go.mod h1:vYZbIyvxyy0FWSmDHChCqKvI40cFTDGSb3D8D70i9GM=
|
github.com/hashicorp/terraform-plugin-go v0.30.0/go.mod h1:8d523ORAW8OHgA9e8JKg0ezL3XUO84H0A25o4NY/jRo=
|
||||||
github.com/hashicorp/terraform-plugin-log v0.10.0 h1:eu2kW6/QBVdN4P3Ju2WiB2W3ObjkAsyfBsL3Wh1fj3g=
|
github.com/hashicorp/terraform-plugin-log v0.10.0 h1:eu2kW6/QBVdN4P3Ju2WiB2W3ObjkAsyfBsL3Wh1fj3g=
|
||||||
github.com/hashicorp/terraform-plugin-log v0.10.0/go.mod h1:/9RR5Cv2aAbrqcTSdNmY1NRHP4E3ekrXRGjqORpXyB0=
|
github.com/hashicorp/terraform-plugin-log v0.10.0/go.mod h1:/9RR5Cv2aAbrqcTSdNmY1NRHP4E3ekrXRGjqORpXyB0=
|
||||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 h1:sy0Bc4A/GZNdmwpVX/Its9aIweCfY9fRfY1IgmXkOj8=
|
github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 h1:ltFG/dSs4mMHNpBqHptCtJqYM4FekUDJbUcWj+6HGlg=
|
||||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2/go.mod h1:MQisArXYCowb/5q4lDS/BWp5KnXiZ4lxOIyrpKBpUBE=
|
github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0/go.mod h1:xJk7ap8vRI/B2U6TrVs7bu/gTihyor8XBTLSs5Y6z2w=
|
||||||
github.com/hashicorp/terraform-plugin-testing v1.14.0 h1:5t4VKrjOJ0rg0sVuSJ86dz5K7PHsMO6OKrHFzDBerWA=
|
github.com/hashicorp/terraform-plugin-testing v1.14.0 h1:5t4VKrjOJ0rg0sVuSJ86dz5K7PHsMO6OKrHFzDBerWA=
|
||||||
github.com/hashicorp/terraform-plugin-testing v1.14.0/go.mod h1:1qfWkecyYe1Do2EEOK/5/WnTyvC8wQucUkkhiGLg5nk=
|
github.com/hashicorp/terraform-plugin-testing v1.14.0/go.mod h1:1qfWkecyYe1Do2EEOK/5/WnTyvC8wQucUkkhiGLg5nk=
|
||||||
github.com/hashicorp/terraform-registry-address v0.4.0 h1:S1yCGomj30Sao4l5BMPjTGZmCNzuv7/GDTDX99E9gTk=
|
github.com/hashicorp/terraform-registry-address v0.4.0 h1:S1yCGomj30Sao4l5BMPjTGZmCNzuv7/GDTDX99E9gTk=
|
||||||
github.com/hashicorp/terraform-registry-address v0.4.0/go.mod h1:LRS1Ay0+mAiRkUyltGT+UHWkIqTFvigGn/LbMshfflE=
|
github.com/hashicorp/terraform-registry-address v0.4.0/go.mod h1:LRS1Ay0+mAiRkUyltGT+UHWkIqTFvigGn/LbMshfflE=
|
||||||
github.com/hashicorp/terraform-svchost v0.2.0 h1:wVc2vMiodOHvNZcQw/3y9af1XSomgjGSv+rv3BMCk7I=
|
github.com/hashicorp/terraform-svchost v0.2.1 h1:ubvrTFw3Q7CsoEaX7V06PtCTKG3wu7GyyobAoN4eF3Q=
|
||||||
github.com/hashicorp/terraform-svchost v0.2.0/go.mod h1:/98rrS2yZsbppi4VGVCjwYmh8dqsKzISqK7Hli+0rcQ=
|
github.com/hashicorp/terraform-svchost v0.2.1/go.mod h1:zDMheBLvNzu7Q6o9TBvPqiZToJcSuCLXjAXxBslSky4=
|
||||||
github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8=
|
github.com/hashicorp/yamux v0.1.2 h1:XtB8kyFOyHXYVFnwT5C3+Bdo8gArse7j2AQ0DA0Uey8=
|
||||||
github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns=
|
github.com/hashicorp/yamux v0.1.2/go.mod h1:C+zze2n6e/7wshOZep2A70/aQU6QBRWJO/G6FT1wIns=
|
||||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
|
||||||
github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||||
github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU=
|
|
||||||
github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
|
||||||
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
|
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
|
||||||
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
|
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
|
||||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||||
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
|
||||||
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||||
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
|
github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
|
||||||
github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
|
github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
|
||||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||||
|
|
@ -495,7 +460,6 @@ github.com/jjti/go-spancheck v0.6.5 h1:lmi7pKxa37oKYIMScialXUK6hP3iY5F1gu+mLBPgY
|
||||||
github.com/jjti/go-spancheck v0.6.5/go.mod h1:aEogkeatBrbYsyW6y5TgDfihCulDYciL1B7rG2vSsrU=
|
github.com/jjti/go-spancheck v0.6.5/go.mod h1:aEogkeatBrbYsyW6y5TgDfihCulDYciL1B7rG2vSsrU=
|
||||||
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||||
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
|
||||||
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
|
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
|
||||||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
|
||||||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||||
|
|
@ -511,8 +475,8 @@ github.com/karamaru-alpha/copyloopvar v1.2.2 h1:yfNQvP9YaGQR7VaWLYcfZUlRP2eo2vhE
|
||||||
github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRkmUvH0x35ykgaKgsgJY=
|
github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRkmUvH0x35ykgaKgsgJY=
|
||||||
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
|
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
|
||||||
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
||||||
github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M=
|
github.com/kisielk/errcheck v1.10.0 h1:Lvs/YAHP24YKg08LA8oDw2z9fJVme090RAXd90S+rrw=
|
||||||
github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
|
github.com/kisielk/errcheck v1.10.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
|
||||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||||
github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
|
github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
|
||||||
github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
|
github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
|
||||||
|
|
@ -554,8 +518,6 @@ github.com/macabu/inamedparam v0.2.0 h1:VyPYpOc10nkhI2qeNUdh3Zket4fcZjEWe35poddB
|
||||||
github.com/macabu/inamedparam v0.2.0/go.mod h1:+Pee9/YfGe5LJ62pYXqB89lJ+0k5bsR8Wgz/C0Zlq3U=
|
github.com/macabu/inamedparam v0.2.0/go.mod h1:+Pee9/YfGe5LJ62pYXqB89lJ+0k5bsR8Wgz/C0Zlq3U=
|
||||||
github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo=
|
github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo=
|
||||||
github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
|
github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
|
||||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
|
||||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
|
||||||
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 h1:3mAIyaGRtjK6EO9E73JlXLtiy7ha80b2ZVGyacxgfww=
|
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 h1:3mAIyaGRtjK6EO9E73JlXLtiy7ha80b2ZVGyacxgfww=
|
||||||
github.com/manuelarte/embeddedstructfieldcheck v0.4.0/go.mod h1:z8dFSyXqp+fC6NLDSljRJeNQJJDWnY7RoWFzV3PC6UM=
|
github.com/manuelarte/embeddedstructfieldcheck v0.4.0/go.mod h1:z8dFSyXqp+fC6NLDSljRJeNQJJDWnY7RoWFzV3PC6UM=
|
||||||
github.com/manuelarte/funcorder v0.5.0 h1:llMuHXXbg7tD0i/LNw8vGnkDTHFpTnWqKPI85Rknc+8=
|
github.com/manuelarte/funcorder v0.5.0 h1:llMuHXXbg7tD0i/LNw8vGnkDTHFpTnWqKPI85Rknc+8=
|
||||||
|
|
@ -566,7 +528,6 @@ github.com/maratori/testpackage v1.1.2 h1:ffDSh+AgqluCLMXhM19f/cpvQAKygKAJXFl9aU
|
||||||
github.com/maratori/testpackage v1.1.2/go.mod h1:8F24GdVDFW5Ew43Et02jamrVMNXLUNaOynhDssITGfc=
|
github.com/maratori/testpackage v1.1.2/go.mod h1:8F24GdVDFW5Ew43Et02jamrVMNXLUNaOynhDssITGfc=
|
||||||
github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4=
|
github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4=
|
||||||
github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs=
|
github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs=
|
||||||
github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE=
|
|
||||||
github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
|
github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
|
||||||
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||||
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
|
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
|
||||||
|
|
@ -576,14 +537,16 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
|
||||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
|
||||||
|
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
|
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||||
github.com/maxatome/go-testdeep v1.14.0 h1:rRlLv1+kI8eOI3OaBXZwb3O7xY3exRzdW5QyX48g9wI=
|
github.com/maxatome/go-testdeep v1.14.0 h1:rRlLv1+kI8eOI3OaBXZwb3O7xY3exRzdW5QyX48g9wI=
|
||||||
github.com/maxatome/go-testdeep v1.14.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
|
github.com/maxatome/go-testdeep v1.14.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
|
||||||
github.com/mgechev/revive v1.14.0 h1:CC2Ulb3kV7JFYt+izwORoS3VT/+Plb8BvslI/l1yZsc=
|
github.com/mgechev/revive v1.15.0 h1:vJ0HzSBzfNyPbHKolgiFjHxLek9KUijhqh42yGoqZ8Q=
|
||||||
github.com/mgechev/revive v1.14.0/go.mod h1:MvnujelCZBZCaoDv5B3foPo6WWgULSSFxvfxp7GsPfo=
|
github.com/mgechev/revive v1.15.0/go.mod h1:LlAKO3QQe9OJ0pVZzI2GPa8CbXGZ/9lNpCGvK4T/a8A=
|
||||||
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
|
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
|
||||||
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
|
||||||
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
|
||||||
|
|
@ -617,35 +580,13 @@ github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm
|
||||||
github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
|
github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
|
||||||
github.com/nunnatsa/ginkgolinter v0.23.0 h1:x3o4DGYOWbBMP/VdNQKgSj+25aJKx2Pe6lHr8gBcgf8=
|
github.com/nunnatsa/ginkgolinter v0.23.0 h1:x3o4DGYOWbBMP/VdNQKgSj+25aJKx2Pe6lHr8gBcgf8=
|
||||||
github.com/nunnatsa/ginkgolinter v0.23.0/go.mod h1:9qN1+0akwXEccwV1CAcCDfcoBlWXHB+ML9884pL4SZ4=
|
github.com/nunnatsa/ginkgolinter v0.23.0/go.mod h1:9qN1+0akwXEccwV1CAcCDfcoBlWXHB+ML9884pL4SZ4=
|
||||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
|
||||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
|
||||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
|
||||||
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
|
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
|
||||||
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
|
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
|
||||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
|
||||||
github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
|
||||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
|
||||||
github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=
|
|
||||||
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
|
|
||||||
github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c=
|
|
||||||
github.com/onsi/ginkgo/v2 v2.28.1 h1:S4hj+HbZp40fNKuLUQOYLDgZLwNUVn19N3Atb98NCyI=
|
|
||||||
github.com/onsi/ginkgo/v2 v2.28.1/go.mod h1:CLtbVInNckU3/+gC8LzkGUb9oF+e8W8TdUsxPwvdOgE=
|
|
||||||
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
|
||||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
|
||||||
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
|
||||||
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
|
|
||||||
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
|
|
||||||
github.com/onsi/gomega v1.39.1 h1:1IJLAad4zjPn2PsnhH70V4DKRFlrCzGBNrNaru+Vf28=
|
|
||||||
github.com/onsi/gomega v1.39.1/go.mod h1:hL6yVALoTOxeWudERyfppUcZXjMwIMLnuSfruD2lcfg=
|
|
||||||
github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
|
github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
|
||||||
github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
|
|
||||||
github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
|
|
||||||
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
|
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
|
||||||
github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
|
github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
|
||||||
github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
|
github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
|
||||||
github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
|
github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
|
||||||
github.com/pb33f/libopenapi v0.15.0 h1:AoBYIY3HXqDDF8O9kcudlqWaRFZZJmgtueE649oHzIw=
|
|
||||||
github.com/pb33f/libopenapi v0.15.0/go.mod h1:m+4Pwri31UvcnZjuP8M7TlbR906DXJmMvYsbis234xg=
|
|
||||||
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
|
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
|
||||||
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||||
|
|
@ -713,9 +654,8 @@ github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tM
|
||||||
github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
|
github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
|
||||||
github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ=
|
github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ=
|
||||||
github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8=
|
github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8=
|
||||||
github.com/securego/gosec/v2 v2.23.0 h1:h4TtF64qFzvnkqvsHC/knT7YC5fqyOCItlVR8+ptEBo=
|
github.com/securego/gosec/v2 v2.24.7 h1:3k5yJnrhT1TTdsG0ZsnenlfCcT+7Y/+zeCPHbL7QAn8=
|
||||||
github.com/securego/gosec/v2 v2.23.0/go.mod h1:qRHEgXLFuYUDkI2T7W7NJAmOkxVhkR0x9xyHOIcMNZ0=
|
github.com/securego/gosec/v2 v2.24.7/go.mod h1:AdDJbjcG/XxFgVv7pW19vMNYlFM6+Q6Qy3t6lWAUcEY=
|
||||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
|
||||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
|
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
|
||||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
|
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
|
||||||
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||||
|
|
@ -732,15 +672,15 @@ github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+W
|
||||||
github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
|
github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
|
||||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||||
github.com/sonatard/noctx v0.4.0 h1:7MC/5Gg4SQ4lhLYR6mvOP6mQVSxCrdyiExo7atBs27o=
|
github.com/sonatard/noctx v0.5.0 h1:e/jdaqAsuWVOKQ0P6NWiIdDNHmHT5SwuuSfojFjzwrw=
|
||||||
github.com/sonatard/noctx v0.4.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
|
github.com/sonatard/noctx v0.5.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
|
||||||
github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
|
github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
|
||||||
github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
|
github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
|
||||||
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
||||||
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
||||||
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||||
github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
|
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
|
||||||
github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
|
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
|
||||||
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
|
github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
|
||||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||||
|
|
@ -754,10 +694,12 @@ github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ=
|
||||||
github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI=
|
github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI=
|
||||||
github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0=
|
github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0=
|
||||||
github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I=
|
github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I=
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1 h1:Y/PcAgM7DPYMNqum0MLv4n1mF9ieuevzcCIZYQfm3Ts=
|
github.com/stackitcloud/stackit-sdk-go/core v0.22.0 h1:6rViz7GnNwXSh51Lur5xuDzO8EWSZfN9J0HvEkBKq6c=
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
|
github.com/stackitcloud/stackit-sdk-go/core v0.22.0/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha h1:ugpMOMUZGB0yXsWcfe97F7GCdjlexbjFuGD8ZeyMSts=
|
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0 h1:4wfRYOEFSpNLPvOV0YNIoGLVQBIQNkCvZwmL7JFzphM=
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha/go.mod h1:v5VGvTxLcCdJJmblbhqYalt/MFHcElDfYoy15CMhaWs=
|
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0/go.mod h1:tIYiqgnS9929dEhQjf6rx1yNsdFf59e4r2wcXQMkLYo=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0 h1:JeSnhioDCfV5K4V4mOjKtKgkgNtrkrU9bkt7JBs57lA=
|
||||||
|
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0/go.mod h1:3NQNKhHYIjIHTmf6RAcYLdnq17a8AZKkqFCu9Q/Y/3Y=
|
||||||
github.com/stbenjam/no-sprintf-host-port v0.3.1 h1:AyX7+dxI4IdLBPtDbsGAyqiTSLpCP9hWRrXQDU4Cm/g=
|
github.com/stbenjam/no-sprintf-host-port v0.3.1 h1:AyX7+dxI4IdLBPtDbsGAyqiTSLpCP9hWRrXQDU4Cm/g=
|
||||||
github.com/stbenjam/no-sprintf-host-port v0.3.1/go.mod h1:ODbZesTCHMVKthBHskvUUexdcNHAQRXk9NpSsL8p/HQ=
|
github.com/stbenjam/no-sprintf-host-port v0.3.1/go.mod h1:ODbZesTCHMVKthBHskvUUexdcNHAQRXk9NpSsL8p/HQ=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
|
@ -775,9 +717,7 @@ github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs
|
||||||
github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
|
github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
|
||||||
github.com/teambition/rrule-go v1.8.2 h1:lIjpjvWTj9fFUZCmuoVDrKVOtdiyzbzc93qTmRVe/J8=
|
github.com/teambition/rrule-go v1.8.2 h1:lIjpjvWTj9fFUZCmuoVDrKVOtdiyzbzc93qTmRVe/J8=
|
||||||
github.com/teambition/rrule-go v1.8.2/go.mod h1:Ieq5AbrKGciP1V//Wq8ktsTXwSwJHDD5mD/wLBGl3p4=
|
github.com/teambition/rrule-go v1.8.2/go.mod h1:Ieq5AbrKGciP1V//Wq8ktsTXwSwJHDD5mD/wLBGl3p4=
|
||||||
github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA=
|
|
||||||
github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0=
|
github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0=
|
||||||
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag=
|
|
||||||
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY=
|
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY=
|
||||||
github.com/tetafro/godot v1.5.4 h1:u1ww+gqpRLiIA16yF2PV1CV1n/X3zhyezbNXC3E14Sg=
|
github.com/tetafro/godot v1.5.4 h1:u1ww+gqpRLiIA16yF2PV1CV1n/X3zhyezbNXC3E14Sg=
|
||||||
github.com/tetafro/godot v1.5.4/go.mod h1:eOkMrVQurDui411nBY2FA05EYH01r14LuWY/NrVDVcU=
|
github.com/tetafro/godot v1.5.4/go.mod h1:eOkMrVQurDui411nBY2FA05EYH01r14LuWY/NrVDVcU=
|
||||||
|
|
@ -793,8 +733,8 @@ github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLk
|
||||||
github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
|
github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
|
||||||
github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
|
github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
|
||||||
github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
|
github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
|
||||||
github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA=
|
github.com/uudashr/gocognit v1.2.1 h1:CSJynt5txTnORn/DkhiB4mZjwPuifyASC8/6Q0I/QS4=
|
||||||
github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU=
|
github.com/uudashr/gocognit v1.2.1/go.mod h1:acaubQc6xYlXFEMb9nWX2dYBzJ/bIjEkc1zzvyIZg5Q=
|
||||||
github.com/uudashr/iface v1.4.1 h1:J16Xl1wyNX9ofhpHmQ9h9gk5rnv2A6lX/2+APLTo0zU=
|
github.com/uudashr/iface v1.4.1 h1:J16Xl1wyNX9ofhpHmQ9h9gk5rnv2A6lX/2+APLTo0zU=
|
||||||
github.com/uudashr/iface v1.4.1/go.mod h1:pbeBPlbuU2qkNDn0mmfrxP2X+wjPMIQAy+r1MBXSXtg=
|
github.com/uudashr/iface v1.4.1/go.mod h1:pbeBPlbuU2qkNDn0mmfrxP2X+wjPMIQAy+r1MBXSXtg=
|
||||||
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
|
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
|
||||||
|
|
@ -804,19 +744,8 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok=
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g=
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
|
||||||
github.com/vmware-labs/yaml-jsonpath v0.3.2 h1:/5QKeCBGdsInyDCyVNLbXyilb61MXGi9NP674f9Hobk=
|
|
||||||
github.com/vmware-labs/yaml-jsonpath v0.3.2/go.mod h1:U6whw1z03QyqgWdgXxvVnQ90zN1BWz5V+51Ewf8k+rQ=
|
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
|
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
|
|
||||||
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
|
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
|
||||||
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
|
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
|
||||||
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
|
||||||
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
|
|
||||||
github.com/xen0n/gosmopolitan v1.3.0 h1:zAZI1zefvo7gcpbCOrPSHJZJYA9ZgLfJqtKzZ5pHqQM=
|
github.com/xen0n/gosmopolitan v1.3.0 h1:zAZI1zefvo7gcpbCOrPSHJZJYA9ZgLfJqtKzZ5pHqQM=
|
||||||
github.com/xen0n/gosmopolitan v1.3.0/go.mod h1:rckfr5T6o4lBtM1ga7mLGKZmLxswUoH1zxHgNXOsEt4=
|
github.com/xen0n/gosmopolitan v1.3.0/go.mod h1:rckfr5T6o4lBtM1ga7mLGKZmLxswUoH1zxHgNXOsEt4=
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||||
|
|
@ -838,14 +767,12 @@ github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU=
|
||||||
github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
|
github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E=
|
||||||
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
|
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
|
||||||
github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0=
|
github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0=
|
||||||
github.com/zclconf/go-cty v1.17.0 h1:seZvECve6XX4tmnvRzWtJNHdscMtYEx5R7bnnVyd/d0=
|
github.com/zclconf/go-cty v1.18.0 h1:pJ8+HNI4gFoyRNqVE37wWbJWVw43BZczFo7KUoRczaA=
|
||||||
github.com/zclconf/go-cty v1.17.0/go.mod h1:wqFzcImaLTI6A5HfsRwB0nj5n0MRZFwmey8YoFPPs3U=
|
github.com/zclconf/go-cty v1.18.0/go.mod h1:qpnV6EDNgC1sns/AleL1fvatHw72j+S+nS+MJ+T2CSg=
|
||||||
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
|
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
|
||||||
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
|
github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
|
||||||
gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo=
|
gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo=
|
||||||
gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8=
|
gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8=
|
||||||
go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ=
|
|
||||||
go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28=
|
|
||||||
go-simpler.org/musttag v0.14.0 h1:XGySZATqQYSEV3/YTy+iX+aofbZZllJaqwFWs+RTtSo=
|
go-simpler.org/musttag v0.14.0 h1:XGySZATqQYSEV3/YTy+iX+aofbZZllJaqwFWs+RTtSo=
|
||||||
go-simpler.org/musttag v0.14.0/go.mod h1:uP8EymctQjJ4Z1kUnjX0u2l60WfUdQxCwSNKzE1JEOE=
|
go-simpler.org/musttag v0.14.0/go.mod h1:uP8EymctQjJ4Z1kUnjX0u2l60WfUdQxCwSNKzE1JEOE=
|
||||||
go-simpler.org/sloglint v0.11.1 h1:xRbPepLT/MHPTCA6TS/wNfZrDzkGvCCqUv4Bdwc3H7s=
|
go-simpler.org/sloglint v0.11.1 h1:xRbPepLT/MHPTCA6TS/wNfZrDzkGvCCqUv4Bdwc3H7s=
|
||||||
|
|
@ -873,8 +800,6 @@ go.opentelemetry.io/otel/sdk/metric v1.39.0 h1:cXMVVFVgsIf2YL6QkRF4Urbr/aMInf+2W
|
||||||
go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew=
|
go.opentelemetry.io/otel/sdk/metric v1.39.0/go.mod h1:xq9HEVH7qeX69/JnwEfp6fVq5wosJsY1mt4lLfYdVew=
|
||||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
|
||||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
|
||||||
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
|
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
|
||||||
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||||
|
|
@ -903,6 +828,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
|
||||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||||
|
golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME=
|
||||||
|
golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
|
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
|
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
|
||||||
golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
|
golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
|
||||||
|
|
@ -940,7 +867,6 @@ golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
|
||||||
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
|
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
|
||||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
|
|
@ -963,7 +889,6 @@ golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/
|
||||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||||
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||||
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
|
||||||
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||||
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||||
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||||
|
|
@ -971,18 +896,16 @@ golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81R
|
||||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
|
||||||
golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
|
|
||||||
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
|
||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||||
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
||||||
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
|
||||||
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||||
|
|
@ -1008,7 +931,6 @@ golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
@ -1019,10 +941,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
@ -1044,7 +963,6 @@ golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
|
@ -1055,7 +973,6 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
||||||
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
|
@ -1069,8 +986,6 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 h1:bTLqdHv7xrGlFbvf5/TXNxy/iUwwdkjhqQTJDjW7aj0=
|
|
||||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4/go.mod h1:g5NllXBEermZrmR51cJDQxmJUHUOfRAaNyWBM+R+548=
|
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||||
|
|
@ -1139,7 +1054,6 @@ golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc
|
||||||
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||||
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||||
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||||
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
|
||||||
golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
|
golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
|
||||||
golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
|
golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
|
||||||
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||||
|
|
@ -1150,10 +1064,6 @@ golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58
|
||||||
golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
|
golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
|
||||||
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
|
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
|
||||||
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
|
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
|
||||||
golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM=
|
|
||||||
golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY=
|
|
||||||
golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated h1:1h2MnaIAIXISqTFKdENegdpAgUXz6NrPEsbIeWaBRvM=
|
|
||||||
golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated/go.mod h1:RVAQXBGNv1ib0J382/DPCRS/BPnsGebyM1Gj5VSDpG8=
|
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
|
@ -1213,8 +1123,8 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc
|
||||||
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||||
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||||
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac=
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 h1:ggcbiqK8WWh6l1dnltU4BgWGIGo+EVYxCaAPih/zQXQ=
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
|
||||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||||
|
|
@ -1227,8 +1137,8 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa
|
||||||
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
|
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
|
||||||
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||||
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||||
google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY=
|
google.golang.org/grpc v1.79.2 h1:fRMD94s2tITpyJGtBBn7MkMseNpOZU8ZxgC3MMBaXRU=
|
||||||
google.golang.org/grpc v1.79.1/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
|
google.golang.org/grpc v1.79.2/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
|
||||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||||
|
|
@ -1250,11 +1160,8 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
|
||||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
|
||||||
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
||||||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
|
@ -1264,8 +1171,6 @@ gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
|
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
package testutil
|
|
||||||
|
|
||||||
import "testing"
|
|
||||||
|
|
||||||
func Equal[V comparable](t *testing.T, got, expected V) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
if expected != got {
|
|
||||||
t.Errorf("assert equal failed:\ngot: %v \nexpected: %v", got, expected)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,651 +0,0 @@
|
||||||
package testutil
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/providerserver"
|
|
||||||
"github.com/hashicorp/terraform-plugin-go/tfprotov6"
|
|
||||||
"github.com/hashicorp/terraform-plugin-testing/config"
|
|
||||||
"github.com/hashicorp/terraform-plugin-testing/echoprovider"
|
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
// Default location of credentials JSON
|
|
||||||
// credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive
|
|
||||||
serviceAccountFilePath = ".stackit/service_account.json"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
// TestAccProtoV6ProviderFactories is used to instantiate a provider during
|
|
||||||
// acceptance testing. The factory function will be invoked for every Terraform
|
|
||||||
// CLI command executed to create a provider server to which the CLI can
|
|
||||||
// reattach.
|
|
||||||
TestAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
|
|
||||||
"stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestEphemeralAccProtoV6ProviderFactories is used to instantiate a provider during
|
|
||||||
// acceptance testing. The factory function will be invoked for every Terraform
|
|
||||||
// CLI command executed to create a provider server to which the CLI can
|
|
||||||
// reattach.
|
|
||||||
//
|
|
||||||
// See the Terraform acceptance test documentation on ephemeral resources for more information:
|
|
||||||
// https://developer.hashicorp.com/terraform/plugin/testing/acceptance-tests/ephemeral-resources
|
|
||||||
TestEphemeralAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServer, error){
|
|
||||||
"stackitprivatepreview": providerserver.NewProtocol6WithError(stackit.New("test-version")()),
|
|
||||||
"echo": echoprovider.NewProviderServer(),
|
|
||||||
}
|
|
||||||
|
|
||||||
// E2ETestsEnabled checks if end-to-end tests should be run.
|
|
||||||
// It is enabled when the TF_ACC environment variable is set to "1".
|
|
||||||
E2ETestsEnabled = os.Getenv("TF_ACC") == "1"
|
|
||||||
// OrganizationId is the id of organization used for tests
|
|
||||||
OrganizationId = os.Getenv("TF_ACC_ORGANIZATION_ID")
|
|
||||||
// ProjectId is the id of project used for tests
|
|
||||||
ProjectId = os.Getenv("TF_ACC_PROJECT_ID")
|
|
||||||
Region = os.Getenv("TF_ACC_REGION")
|
|
||||||
// ServiceAccountFile is the json file of the service account
|
|
||||||
ServiceAccountFile = os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")
|
|
||||||
// ServerId is the id of a server used for some tests
|
|
||||||
ServerId = getenv("TF_ACC_SERVER_ID", "")
|
|
||||||
// TestProjectParentContainerID is the container id of the parent resource under which projects are created as part of the resource-manager acceptance tests
|
|
||||||
TestProjectParentContainerID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID")
|
|
||||||
// TestProjectParentUUID is the uuid of the parent resource under which projects are created as part of the resource-manager acceptance tests
|
|
||||||
TestProjectParentUUID = os.Getenv("TF_ACC_TEST_PROJECT_PARENT_UUID")
|
|
||||||
// TestProjectServiceAccountEmail is the e-mail of a service account with admin permissions on the organization under which projects are created as part of the resource-manager acceptance tests
|
|
||||||
TestProjectServiceAccountEmail = os.Getenv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL")
|
|
||||||
// TestProjectUserEmail is the e-mail of a user for the project created as part of the resource-manager acceptance tests
|
|
||||||
// Default email: acc-test@sa.stackit.cloud
|
|
||||||
TestProjectUserEmail = getenv("TF_ACC_TEST_PROJECT_USER_EMAIL", "acc-test@sa.stackit.cloud")
|
|
||||||
// TestImageLocalFilePath is the local path to an image file used for image acceptance tests
|
|
||||||
TestImageLocalFilePath = getenv("TF_ACC_TEST_IMAGE_LOCAL_FILE_PATH", "default")
|
|
||||||
|
|
||||||
CdnCustomEndpoint = os.Getenv("TF_ACC_CDN_CUSTOM_ENDPOINT")
|
|
||||||
DnsCustomEndpoint = os.Getenv("TF_ACC_DNS_CUSTOM_ENDPOINT")
|
|
||||||
GitCustomEndpoint = os.Getenv("TF_ACC_GIT_CUSTOM_ENDPOINT")
|
|
||||||
IaaSCustomEndpoint = os.Getenv("TF_ACC_IAAS_CUSTOM_ENDPOINT")
|
|
||||||
KMSCustomEndpoint = os.Getenv("TF_ACC_KMS_CUSTOM_ENDPOINT")
|
|
||||||
LoadBalancerCustomEndpoint = os.Getenv("TF_ACC_LOADBALANCER_CUSTOM_ENDPOINT")
|
|
||||||
LogMeCustomEndpoint = os.Getenv("TF_ACC_LOGME_CUSTOM_ENDPOINT")
|
|
||||||
MariaDBCustomEndpoint = os.Getenv("TF_ACC_MARIADB_CUSTOM_ENDPOINT")
|
|
||||||
ModelServingCustomEndpoint = os.Getenv("TF_ACC_MODELSERVING_CUSTOM_ENDPOINT")
|
|
||||||
AuthorizationCustomEndpoint = os.Getenv("TF_ACC_authorization_custom_endpoint")
|
|
||||||
MongoDBFlexCustomEndpoint = os.Getenv("TF_ACC_MONGODBFLEX_CUSTOM_ENDPOINT")
|
|
||||||
OpenSearchCustomEndpoint = os.Getenv("TF_ACC_OPENSEARCH_CUSTOM_ENDPOINT")
|
|
||||||
ObservabilityCustomEndpoint = os.Getenv("TF_ACC_OBSERVABILITY_CUSTOM_ENDPOINT")
|
|
||||||
ObjectStorageCustomEndpoint = os.Getenv("TF_ACC_OBJECTSTORAGE_CUSTOM_ENDPOINT")
|
|
||||||
PostgresFlexCustomEndpoint = os.Getenv("TF_ACC_POSTGRESFLEX_CUSTOM_ENDPOINT")
|
|
||||||
RabbitMQCustomEndpoint = os.Getenv("TF_ACC_RABBITMQ_CUSTOM_ENDPOINT")
|
|
||||||
RedisCustomEndpoint = os.Getenv("TF_ACC_REDIS_CUSTOM_ENDPOINT")
|
|
||||||
ResourceManagerCustomEndpoint = os.Getenv("TF_ACC_RESOURCEMANAGER_CUSTOM_ENDPOINT")
|
|
||||||
ScfCustomEndpoint = os.Getenv("TF_ACC_SCF_CUSTOM_ENDPOINT")
|
|
||||||
SecretsManagerCustomEndpoint = os.Getenv("TF_ACC_SECRETSMANAGER_CUSTOM_ENDPOINT")
|
|
||||||
SQLServerFlexCustomEndpoint = os.Getenv("TF_ACC_SQLSERVERFLEX_CUSTOM_ENDPOINT")
|
|
||||||
ServerBackupCustomEndpoint = os.Getenv("TF_ACC_SERVER_BACKUP_CUSTOM_ENDPOINT")
|
|
||||||
ServerUpdateCustomEndpoint = os.Getenv("TF_ACC_SERVER_UPDATE_CUSTOM_ENDPOINT")
|
|
||||||
ServiceAccountCustomEndpoint = os.Getenv("TF_ACC_SERVICE_ACCOUNT_CUSTOM_ENDPOINT")
|
|
||||||
SKECustomEndpoint = os.Getenv("TF_ACC_SKE_CUSTOM_ENDPOINT")
|
|
||||||
)
|
|
||||||
|
|
||||||
// Provider config helper functions
|
|
||||||
|
|
||||||
func ObservabilityProviderConfig() string {
|
|
||||||
if ObservabilityCustomEndpoint == "" {
|
|
||||||
return `provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
observability_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
ObservabilityCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func CdnProviderConfig() string {
|
|
||||||
if CdnCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
cdn_custom_endpoint = "%s"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`,
|
|
||||||
CdnCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func DnsProviderConfig() string {
|
|
||||||
if DnsCustomEndpoint == "" {
|
|
||||||
return `provider "stackitprivatepreview" {}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
dns_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
DnsCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func IaaSProviderConfig() string {
|
|
||||||
if IaaSCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
iaas_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
IaaSCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func IaaSProviderConfigWithBetaResourcesEnabled() string {
|
|
||||||
if IaaSCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
enable_beta_resources = true
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
enable_beta_resources = true
|
|
||||||
iaas_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
IaaSCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func IaaSProviderConfigWithExperiments() string {
|
|
||||||
if IaaSCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
experiments = [ "routing-tables", "network" ]
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
iaas_custom_endpoint = "%s"
|
|
||||||
experiments = [ "routing-tables", "network" ]
|
|
||||||
}`,
|
|
||||||
IaaSCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func KMSProviderConfig() string {
|
|
||||||
if KMSCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
kms_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
KMSCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func LoadBalancerProviderConfig() string {
|
|
||||||
if LoadBalancerCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
loadbalancer_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
LoadBalancerCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func LogMeProviderConfig() string {
|
|
||||||
if LogMeCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
logme_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
LogMeCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func MariaDBProviderConfig() string {
|
|
||||||
if MariaDBCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
mariadb_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
MariaDBCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ModelServingProviderConfig() string {
|
|
||||||
if ModelServingCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}
|
|
||||||
`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
modelserving_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
ModelServingCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func MongoDBFlexProviderConfig() string {
|
|
||||||
if MongoDBFlexCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
mongodbflex_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
MongoDBFlexCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ObjectStorageProviderConfig() string {
|
|
||||||
if ObjectStorageCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
objectstorage_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
ObjectStorageCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func OpenSearchProviderConfig() string {
|
|
||||||
if OpenSearchCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
opensearch_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
OpenSearchCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func PostgresFlexProviderConfig(saFile string) string {
|
|
||||||
if PostgresFlexCustomEndpoint == "" {
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
service_account_key_path = "%s"
|
|
||||||
}`, saFile)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
service_account_key_path = "%s"
|
|
||||||
postgresflex_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
saFile,
|
|
||||||
PostgresFlexCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func RabbitMQProviderConfig() string {
|
|
||||||
if RabbitMQCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
rabbitmq_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
RabbitMQCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func RedisProviderConfig() string {
|
|
||||||
if RedisCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
redis_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
RedisCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ResourceManagerProviderConfig() string {
|
|
||||||
key := GetTestProjectServiceAccountJson("")
|
|
||||||
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
service_account_key = "%s"
|
|
||||||
}`,
|
|
||||||
key,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
resourcemanager_custom_endpoint = "%s"
|
|
||||||
authorization_custom_endpoint = "%s"
|
|
||||||
service_account_token = "%s"
|
|
||||||
}`,
|
|
||||||
ResourceManagerCustomEndpoint,
|
|
||||||
AuthorizationCustomEndpoint,
|
|
||||||
key,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func SecretsManagerProviderConfig() string {
|
|
||||||
if SecretsManagerCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
secretsmanager_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
SecretsManagerCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func SQLServerFlexProviderConfig(saFile string) string {
|
|
||||||
if SQLServerFlexCustomEndpoint == "" {
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
service_account_key_path = "%s"
|
|
||||||
}`, saFile)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
service_account_key_path = "%s"
|
|
||||||
sqlserverflex_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
saFile,
|
|
||||||
SQLServerFlexCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ServerBackupProviderConfig() string {
|
|
||||||
if ServerBackupCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
server_backup_custom_endpoint = "%s"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`,
|
|
||||||
ServerBackupCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ServerUpdateProviderConfig() string {
|
|
||||||
if ServerUpdateCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
server_update_custom_endpoint = "%s"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`,
|
|
||||||
ServerUpdateCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func SKEProviderConfig() string {
|
|
||||||
if SKECustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
ske_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
SKECustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func AuthorizationProviderConfig() string {
|
|
||||||
if AuthorizationCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
experiments = ["iam"]
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
authorization_custom_endpoint = "%s"
|
|
||||||
experiments = ["iam"]
|
|
||||||
}`,
|
|
||||||
AuthorizationCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ServiceAccountProviderConfig() string {
|
|
||||||
if ServiceAccountCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
service_account_custom_endpoint = "%s"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`,
|
|
||||||
ServiceAccountCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func GitProviderConfig() string {
|
|
||||||
if GitCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
git_custom_endpoint = "%s"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}`,
|
|
||||||
GitCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ScfProviderConfig() string {
|
|
||||||
if ScfCustomEndpoint == "" {
|
|
||||||
return `
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
}`
|
|
||||||
}
|
|
||||||
return fmt.Sprintf(`
|
|
||||||
provider "stackitprivatepreview" {
|
|
||||||
default_region = "eu01"
|
|
||||||
scf_custom_endpoint = "%s"
|
|
||||||
}`,
|
|
||||||
ScfCustomEndpoint,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func ResourceNameWithDateTime(name string) string {
|
|
||||||
dateTime := time.Now().Format(time.RFC3339)
|
|
||||||
// Remove timezone to have a smaller datetime
|
|
||||||
dateTimeTrimmed, _, _ := strings.Cut(dateTime, "+")
|
|
||||||
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetTestProjectServiceAccountJson(path string) string {
|
|
||||||
var err error
|
|
||||||
token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON")
|
|
||||||
if !tokenSet || token == "" {
|
|
||||||
token, err = readTestServiceAccountJsonFromFile(path)
|
|
||||||
if err != nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return token
|
|
||||||
}
|
|
||||||
|
|
||||||
//func GetTestProjectServiceAccountToken(path string) string {
|
|
||||||
// var err error
|
|
||||||
// token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN")
|
|
||||||
// if !tokenSet || token == "" {
|
|
||||||
// token, err = readTestTokenFromCredentialsFile(path)
|
|
||||||
// if err != nil {
|
|
||||||
// return ""
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// return token
|
|
||||||
//}
|
|
||||||
//
|
|
||||||
//func readTestTokenFromCredentialsFile(path string) (string, error) {
|
|
||||||
// if path == "" {
|
|
||||||
// customPath, customPathSet := os.LookupEnv("STACKIT_CREDENTIALS_PATH")
|
|
||||||
// if !customPathSet || customPath == "" {
|
|
||||||
// path = credentialsFilePath
|
|
||||||
// home, err := os.UserHomeDir()
|
|
||||||
// if err != nil {
|
|
||||||
// return "", fmt.Errorf("getting home directory: %w", err)
|
|
||||||
// }
|
|
||||||
// path = filepath.Join(home, path)
|
|
||||||
// } else {
|
|
||||||
// path = customPath
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// credentialsRaw, err := os.ReadFile(path)
|
|
||||||
// if err != nil {
|
|
||||||
// return "", fmt.Errorf("opening file: %w", err)
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// var credentials struct {
|
|
||||||
// TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN string `json:"TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN"`
|
|
||||||
// }
|
|
||||||
// err = json.Unmarshal(credentialsRaw, &credentials)
|
|
||||||
// if err != nil {
|
|
||||||
// return "", fmt.Errorf("unmarshalling credentials: %w", err)
|
|
||||||
// }
|
|
||||||
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
|
|
||||||
//}
|
|
||||||
|
|
||||||
func readTestServiceAccountJsonFromFile(path string) (string, error) {
|
|
||||||
if path == "" {
|
|
||||||
customPath, customPathSet := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH")
|
|
||||||
if !customPathSet || customPath == "" {
|
|
||||||
path = serviceAccountFilePath
|
|
||||||
home, err := os.UserHomeDir()
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("getting home directory: %w", err)
|
|
||||||
}
|
|
||||||
path = filepath.Join(home, path)
|
|
||||||
} else {
|
|
||||||
path = customPath
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
credentialsRaw, err := os.ReadFile(path)
|
|
||||||
if err != nil {
|
|
||||||
return "", fmt.Errorf("opening file: %w", err)
|
|
||||||
}
|
|
||||||
return string(credentialsRaw), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func getenv(key, defaultValue string) string {
|
|
||||||
val := os.Getenv(key)
|
|
||||||
if val == "" {
|
|
||||||
return defaultValue
|
|
||||||
}
|
|
||||||
return val
|
|
||||||
}
|
|
||||||
|
|
||||||
// CreateDefaultLocalFile is a helper for local_file_path. No real data is created
|
|
||||||
func CreateDefaultLocalFile() os.File {
|
|
||||||
// Define the file name and size
|
|
||||||
fileName := "test-512k.img"
|
|
||||||
size := 512 * 1024 // 512 KB
|
|
||||||
|
|
||||||
// Create the file
|
|
||||||
file, err := os.Create(fileName)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Seek to the desired position (512 KB)
|
|
||||||
_, err = file.Seek(int64(size), 0)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return *file
|
|
||||||
}
|
|
||||||
|
|
||||||
func ConvertConfigVariable(variable config.Variable) string {
|
|
||||||
tmpByteArray, _ := variable.MarshalJSON()
|
|
||||||
// In case the variable is a string, the quotes should be removed
|
|
||||||
if tmpByteArray[0] == '"' && tmpByteArray[len(tmpByteArray)-1] == '"' {
|
|
||||||
result := string(tmpByteArray[1 : len(tmpByteArray)-1])
|
|
||||||
// Replace escaped quotes which where added MarshalJSON
|
|
||||||
rawString := strings.ReplaceAll(result, `\"`, `"`)
|
|
||||||
return rawString
|
|
||||||
}
|
|
||||||
return string(tmpByteArray)
|
|
||||||
}
|
|
||||||
|
|
@ -1,50 +0,0 @@
|
||||||
// Copyright (c) STACKIT
|
|
||||||
|
|
||||||
package testutil
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-testing/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestConvertConfigVariable(t *testing.T) {
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
variable config.Variable
|
|
||||||
want string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "string",
|
|
||||||
variable: config.StringVariable("test"),
|
|
||||||
want: "test",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bool: true",
|
|
||||||
variable: config.BoolVariable(true),
|
|
||||||
want: "true",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bool: false",
|
|
||||||
variable: config.BoolVariable(false),
|
|
||||||
want: "false",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "integer",
|
|
||||||
variable: config.IntegerVariable(10),
|
|
||||||
want: "10",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "quoted string",
|
|
||||||
variable: config.StringVariable(`instance =~ ".*"`),
|
|
||||||
want: `instance =~ ".*"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, tt := range tests {
|
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
|
||||||
if got := ConvertConfigVariable(tt.variable); got != tt.want {
|
|
||||||
t.Errorf("ConvertConfigVariable() = %v, want %v", got, tt.want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -53,9 +53,9 @@ func CreateTemporaryHome(createValidCredentialsFile bool, t *testing.T) string {
|
||||||
|
|
||||||
// Define content, default = invalid token
|
// Define content, default = invalid token
|
||||||
token := "foo_token"
|
token := "foo_token"
|
||||||
if createValidCredentialsFile {
|
//if createValidCredentialsFile {
|
||||||
token = GetTestProjectServiceAccountJson("")
|
// token = GetTestProjectServiceAccountJson("")
|
||||||
}
|
//}
|
||||||
if _, err = file.WriteString(token); err != nil {
|
if _, err = file.WriteString(token); err != nil {
|
||||||
t.Fatalf("Error writing to file: %v", err)
|
t.Fatalf("Error writing to file: %v", err)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -293,25 +293,24 @@ func RedisProviderConfig() string {
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
func ResourceManagerProviderConfig() string {
|
func ResourceManagerProviderConfig(saKeyPath string) string {
|
||||||
key := GetTestProjectServiceAccountJson("")
|
|
||||||
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
|
if ResourceManagerCustomEndpoint == "" || AuthorizationCustomEndpoint == "" {
|
||||||
return fmt.Sprintf(`
|
return fmt.Sprintf(`
|
||||||
provider "stackitprivatepreview" {
|
provider "stackitprivatepreview" {
|
||||||
service_account_key = "%s"
|
service_account_key_path = "%s"
|
||||||
}`,
|
}`,
|
||||||
key,
|
saKeyPath,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
return fmt.Sprintf(`
|
return fmt.Sprintf(`
|
||||||
provider "stackitprivatepreview" {
|
provider "stackitprivatepreview" {
|
||||||
resourcemanager_custom_endpoint = "%s"
|
resourcemanager_custom_endpoint = "%s"
|
||||||
authorization_custom_endpoint = "%s"
|
authorization_custom_endpoint = "%s"
|
||||||
service_account_token = "%s"
|
service_account_key_path = "%s"
|
||||||
}`,
|
}`,
|
||||||
ResourceManagerCustomEndpoint,
|
ResourceManagerCustomEndpoint,
|
||||||
AuthorizationCustomEndpoint,
|
AuthorizationCustomEndpoint,
|
||||||
key,
|
saKeyPath,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,6 @@ import (
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
|
@ -20,9 +19,8 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// Default location of credentials JSON
|
// Default location of service account JSON
|
||||||
// credentialsFilePath = ".stackit/credentials.json" //nolint:gosec // linter false positive
|
serviceAccountFilePath = "service_account.json"
|
||||||
serviceAccountFilePath = ".stackit/service_account.json"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
@ -101,17 +99,17 @@ func ResourceNameWithDateTime(name string) string {
|
||||||
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
|
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
|
||||||
}
|
}
|
||||||
|
|
||||||
func GetTestProjectServiceAccountJson(path string) string {
|
//func GetTestProjectServiceAccountJson(path string) string {
|
||||||
var err error
|
// var err error
|
||||||
token, tokenSet := os.LookupEnv("TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_JSON")
|
// json, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_JSON_CONTENT")
|
||||||
if !tokenSet || token == "" {
|
// if !ok || json == "" {
|
||||||
token, err = readTestServiceAccountJsonFromFile(path)
|
// json, err = readTestServiceAccountJsonFromFile(path)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return ""
|
// return ""
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
return token
|
// return json
|
||||||
}
|
//}
|
||||||
|
|
||||||
// func GetTestProjectServiceAccountToken(path string) string {
|
// func GetTestProjectServiceAccountToken(path string) string {
|
||||||
// var err error
|
// var err error
|
||||||
|
|
@ -155,27 +153,30 @@ func GetTestProjectServiceAccountJson(path string) string {
|
||||||
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
|
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
|
||||||
//}
|
//}
|
||||||
|
|
||||||
func readTestServiceAccountJsonFromFile(path string) (string, error) {
|
//func readTestServiceAccountJsonFromFile(path string) (string, error) {
|
||||||
if path == "" {
|
// if path == "" {
|
||||||
customPath, customPathSet := os.LookupEnv("STACKIT_SERVICE_ACCOUNT_PATH")
|
// customPath, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_FILE")
|
||||||
if !customPathSet || customPath == "" {
|
// if !ok || customPath == "" {
|
||||||
path = serviceAccountFilePath
|
// path = serviceAccountFilePath
|
||||||
home, err := os.UserHomeDir()
|
// // TODO: check if we want to handle this with a home dir
|
||||||
if err != nil {
|
// /*
|
||||||
return "", fmt.Errorf("getting home directory: %w", err)
|
// home, err := os.UserHomeDir()
|
||||||
}
|
// if err != nil {
|
||||||
path = filepath.Join(home, path)
|
// return "", fmt.Errorf("getting home directory: %w", err)
|
||||||
} else {
|
// }
|
||||||
path = customPath
|
// path = filepath.Join(home, path)
|
||||||
}
|
// */
|
||||||
}
|
// } else {
|
||||||
|
// path = customPath
|
||||||
credentialsRaw, err := os.ReadFile(path)
|
// }
|
||||||
if err != nil {
|
// }
|
||||||
return "", fmt.Errorf("opening file: %w", err)
|
//
|
||||||
}
|
// credentialsRaw, err := os.ReadFile(path)
|
||||||
return string(credentialsRaw), nil
|
// if err != nil {
|
||||||
}
|
// return "", fmt.Errorf("opening file: %w", err)
|
||||||
|
// }
|
||||||
|
// return string(credentialsRaw), nil
|
||||||
|
//}
|
||||||
|
|
||||||
func getenv(key, defaultValue string) string {
|
func getenv(key, defaultValue string) string {
|
||||||
val := os.Getenv(key)
|
val := os.Getenv(key)
|
||||||
|
|
|
||||||
3
service_specs/postgres-flex/generator_settings.yml
Normal file
3
service_specs/postgres-flex/generator_settings.yml
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
versions:
|
||||||
|
- name: alpha
|
||||||
|
path: v3alpha1
|
||||||
5
service_specs/sqlserverflex/generator_settings.yml
Normal file
5
service_specs/sqlserverflex/generator_settings.yml
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
versions:
|
||||||
|
- name: alpha
|
||||||
|
path: v3alpha1
|
||||||
|
- name: beta
|
||||||
|
path: v3beta1
|
||||||
|
|
@ -11,13 +11,13 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
pgDsGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Ensure the implementation satisfies the expected interfaces.
|
// Ensure the implementation satisfies the expected interfaces.
|
||||||
|
|
@ -32,13 +32,13 @@ func NewDatabaseDataSource() datasource.DataSource {
|
||||||
|
|
||||||
// dataSourceModel maps the data source schema data.
|
// dataSourceModel maps the data source schema data.
|
||||||
type dataSourceModel struct {
|
type dataSourceModel struct {
|
||||||
postgresflexalpha2.DatabaseModel
|
pgDsGen.DatabaseModel
|
||||||
TerraformID types.String `tfsdk:"id"`
|
TerraformID types.String `tfsdk:"id"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// databaseDataSource is the data source implementation.
|
// databaseDataSource is the data source implementation.
|
||||||
type databaseDataSource struct {
|
type databaseDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -73,7 +73,7 @@ func (r *databaseDataSource) Configure(
|
||||||
|
|
||||||
// Schema defines the schema for the data source.
|
// Schema defines the schema for the data source.
|
||||||
func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
|
func (r *databaseDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
|
||||||
s := postgresflexalpha2.DatabaseDataSourceSchema(ctx)
|
s := pgDsGen.DatabaseDataSourceSchema(ctx)
|
||||||
s.Attributes["id"] = schema.StringAttribute{
|
s.Attributes["id"] = schema.StringAttribute{
|
||||||
Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
|
Description: "Terraform's internal resource ID. It is structured as \\\"`project_id`,`region`,`instance_id`," +
|
||||||
"`database_id`\\\".\",",
|
"`database_id`\\\".\",",
|
||||||
|
|
@ -144,7 +144,7 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
|
||||||
model *dataSourceModel,
|
model *dataSourceModel,
|
||||||
projectId, region, instanceId string,
|
projectId, region, instanceId string,
|
||||||
diags *diag.Diagnostics,
|
diags *diag.Diagnostics,
|
||||||
) (*postgresflexalpha.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
|
isIdSet := !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown()
|
||||||
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
|
isNameSet := !model.Name.IsNull() && !model.Name.IsUnknown()
|
||||||
|
|
||||||
|
|
@ -159,12 +159,12 @@ func (r *databaseDataSource) getDatabaseByNameOrID(
|
||||||
if isIdSet {
|
if isIdSet {
|
||||||
databaseId := model.DatabaseId.ValueInt64()
|
databaseId := model.DatabaseId.ValueInt64()
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
return getDatabaseById(ctx, r.client, projectId, region, instanceId, databaseId)
|
return getDatabaseById(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseId)
|
||||||
}
|
}
|
||||||
|
|
||||||
databaseName := model.Name.ValueString()
|
databaseName := model.Name.ValueString()
|
||||||
ctx = tflog.SetField(ctx, "name", databaseName)
|
ctx = tflog.SetField(ctx, "name", databaseName)
|
||||||
return getDatabaseByName(ctx, r.client, projectId, region, instanceId, databaseName)
|
return getDatabaseByName(ctx, r.client.DefaultAPI, projectId, region, instanceId, databaseName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// handleReadError centralizes API error handling for the Read operation.
|
// handleReadError centralizes API error handling for the Read operation.
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
)
|
)
|
||||||
|
|
||||||
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
|
// databaseClientReader represents the contract to listing databases from postgresflex.APIClient.
|
||||||
|
|
@ -15,7 +15,7 @@ type databaseClientReader interface {
|
||||||
projectId string,
|
projectId string,
|
||||||
region string,
|
region string,
|
||||||
instanceId string,
|
instanceId string,
|
||||||
) postgresflex.ApiListDatabasesRequestRequest
|
) v3alpha1api.ApiListDatabasesRequestRequest
|
||||||
}
|
}
|
||||||
|
|
||||||
// getDatabaseById gets a database by its ID.
|
// getDatabaseById gets a database by its ID.
|
||||||
|
|
@ -24,9 +24,9 @@ func getDatabaseById(
|
||||||
client databaseClientReader,
|
client databaseClientReader,
|
||||||
projectId, region, instanceId string,
|
projectId, region, instanceId string,
|
||||||
databaseId int64,
|
databaseId int64,
|
||||||
) (*postgresflex.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
filter := func(db postgresflex.ListDatabase) bool {
|
filter := func(db v3alpha1api.ListDatabase) bool {
|
||||||
return db.Id != nil && *db.Id == databaseId
|
return int64(db.Id) == databaseId
|
||||||
}
|
}
|
||||||
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
||||||
}
|
}
|
||||||
|
|
@ -36,9 +36,9 @@ func getDatabaseByName(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client databaseClientReader,
|
client databaseClientReader,
|
||||||
projectId, region, instanceId, databaseName string,
|
projectId, region, instanceId, databaseName string,
|
||||||
) (*postgresflex.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
filter := func(db postgresflex.ListDatabase) bool {
|
filter := func(db v3alpha1api.ListDatabase) bool {
|
||||||
return db.Name != nil && *db.Name == databaseName
|
return db.Name == databaseName
|
||||||
}
|
}
|
||||||
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
return getDatabase(ctx, client, projectId, region, instanceId, filter)
|
||||||
}
|
}
|
||||||
|
|
@ -49,8 +49,8 @@ func getDatabase(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client databaseClientReader,
|
client databaseClientReader,
|
||||||
projectId, region, instanceId string,
|
projectId, region, instanceId string,
|
||||||
filter func(db postgresflex.ListDatabase) bool,
|
filter func(db v3alpha1api.ListDatabase) bool,
|
||||||
) (*postgresflex.ListDatabase, error) {
|
) (*v3alpha1api.ListDatabase, error) {
|
||||||
if projectId == "" || region == "" || instanceId == "" {
|
if projectId == "" || region == "" || instanceId == "" {
|
||||||
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
|
return nil, fmt.Errorf("all parameters (project, region, instance) are required")
|
||||||
}
|
}
|
||||||
|
|
@ -59,18 +59,18 @@ func getDatabase(
|
||||||
|
|
||||||
for page := int32(1); ; page++ {
|
for page := int32(1); ; page++ {
|
||||||
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
|
res, err := client.ListDatabasesRequest(ctx, projectId, region, instanceId).
|
||||||
Page(page).Size(pageSize).Sort(postgresflex.DATABASESORT_DATABASE_ID_ASC).Execute()
|
Page(page).Size(pageSize).Sort(v3alpha1api.DATABASESORT_DATABASE_ID_ASC).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
|
return nil, fmt.Errorf("requesting database list (page %d): %w", page, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the API returns no databases, we have reached the end of the list.
|
// If the API returns no databases, we have reached the end of the list.
|
||||||
if res.Databases == nil || len(*res.Databases) == 0 {
|
if len(res.Databases) == 0 {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
// Iterate over databases to find a match
|
// Iterate over databases to find a match
|
||||||
for _, db := range *res.Databases {
|
for _, db := range res.Databases {
|
||||||
if filter(db) {
|
if filter(db) {
|
||||||
foundDb := db
|
foundDb := db
|
||||||
return &foundDb, nil
|
return &foundDb, nil
|
||||||
|
|
@ -82,10 +82,6 @@ func getDatabase(
|
||||||
}
|
}
|
||||||
|
|
||||||
// cleanString removes leading and trailing quotes which are sometimes returned by the API.
|
// cleanString removes leading and trailing quotes which are sometimes returned by the API.
|
||||||
func cleanString(s *string) *string {
|
func cleanString(s string) string {
|
||||||
if s == nil {
|
return strings.Trim(s, "\"")
|
||||||
return nil
|
|
||||||
}
|
|
||||||
res := strings.Trim(*s, "\"")
|
|
||||||
return &res
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -5,127 +5,99 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/google/go-cmp/cmp"
|
"github.com/google/go-cmp/cmp"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type mockRequest struct {
|
|
||||||
executeFunc func() (*postgresflex.ListDatabasesResponse, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *mockRequest) Page(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
|
|
||||||
func (m *mockRequest) Size(_ int32) postgresflex.ApiListDatabasesRequestRequest { return m }
|
|
||||||
func (m *mockRequest) Sort(_ postgresflex.DatabaseSort) postgresflex.ApiListDatabasesRequestRequest {
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
func (m *mockRequest) Execute() (*postgresflex.ListDatabasesResponse, error) {
|
|
||||||
return m.executeFunc()
|
|
||||||
}
|
|
||||||
|
|
||||||
type mockDBClient struct {
|
|
||||||
executeRequest func() postgresflex.ApiListDatabasesRequestRequest
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ databaseClientReader = (*mockDBClient)(nil)
|
|
||||||
|
|
||||||
func (m *mockDBClient) ListDatabasesRequest(
|
|
||||||
_ context.Context,
|
|
||||||
_, _, _ string,
|
|
||||||
) postgresflex.ApiListDatabasesRequestRequest {
|
|
||||||
return m.executeRequest()
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetDatabase(t *testing.T) {
|
func TestGetDatabase(t *testing.T) {
|
||||||
mockResp := func(page int64) (*postgresflex.ListDatabasesResponse, error) {
|
mockResp := func(page int32) (*v3alpha1api.ListDatabasesResponse, error) {
|
||||||
if page == 1 {
|
if page == 1 {
|
||||||
return &postgresflex.ListDatabasesResponse{
|
return &v3alpha1api.ListDatabasesResponse{
|
||||||
Databases: &[]postgresflex.ListDatabase{
|
Databases: []v3alpha1api.ListDatabase{
|
||||||
{Id: utils.Ptr(int64(1)), Name: utils.Ptr("first")},
|
{Id: int32(1), Name: "first"},
|
||||||
{Id: utils.Ptr(int64(2)), Name: utils.Ptr("second")},
|
{Id: int32(2), Name: "second"},
|
||||||
},
|
},
|
||||||
Pagination: &postgresflex.Pagination{
|
Pagination: v3alpha1api.Pagination{
|
||||||
Page: utils.Ptr(int64(1)),
|
Page: int32(1),
|
||||||
TotalPages: utils.Ptr(int64(2)),
|
TotalPages: int32(2),
|
||||||
Size: utils.Ptr(int64(3)),
|
Size: int32(3),
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if page == 2 {
|
if page == 2 {
|
||||||
return &postgresflex.ListDatabasesResponse{
|
return &v3alpha1api.ListDatabasesResponse{
|
||||||
Databases: &[]postgresflex.ListDatabase{{Id: utils.Ptr(int64(3)), Name: utils.Ptr("three")}},
|
Databases: []v3alpha1api.ListDatabase{{Id: int32(3), Name: "three"}},
|
||||||
Pagination: &postgresflex.Pagination{
|
Pagination: v3alpha1api.Pagination{
|
||||||
Page: utils.Ptr(int64(2)),
|
Page: int32(2),
|
||||||
TotalPages: utils.Ptr(int64(2)),
|
TotalPages: int32(2),
|
||||||
Size: utils.Ptr(int64(3)),
|
Size: int32(3),
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflex.ListDatabasesResponse{
|
return &v3alpha1api.ListDatabasesResponse{
|
||||||
Databases: &[]postgresflex.ListDatabase{},
|
Databases: []v3alpha1api.ListDatabase{},
|
||||||
Pagination: &postgresflex.Pagination{
|
Pagination: v3alpha1api.Pagination{
|
||||||
Page: utils.Ptr(int64(3)),
|
Page: int32(3),
|
||||||
TotalPages: utils.Ptr(int64(2)),
|
TotalPages: int32(2),
|
||||||
Size: utils.Ptr(int64(3)),
|
Size: int32(3),
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
description string
|
description string
|
||||||
projectId string
|
projectID string
|
||||||
region string
|
region string
|
||||||
instanceId string
|
instanceID string
|
||||||
wantErr bool
|
wantErr bool
|
||||||
wantDbName string
|
wantDbName string
|
||||||
wantDbId int64
|
wantDbID int32
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
description: "Success - Found by name on first page",
|
description: "Success - Found by name on first page",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantErr: false,
|
wantErr: false,
|
||||||
wantDbName: "second",
|
wantDbName: "second",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Success - Found by id on first page",
|
description: "Success - Found by id on first page",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantErr: false,
|
wantErr: false,
|
||||||
wantDbId: 2,
|
wantDbID: 2,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Success - Found by name on second page",
|
description: "Success - Found by name on second page",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantErr: false,
|
wantErr: false,
|
||||||
wantDbName: "three",
|
wantDbName: "three",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Success - Found by id on second page",
|
description: "Success - Found by id on second page",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantErr: false,
|
wantErr: false,
|
||||||
wantDbId: 1,
|
wantDbID: 1,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Error - API failure",
|
description: "Error - API failure",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantErr: true,
|
wantErr: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Error - Missing parameters",
|
description: "Error - Missing parameters",
|
||||||
projectId: "", region: "reg", instanceId: "inst",
|
projectID: "", region: "reg", instanceID: "inst",
|
||||||
wantErr: true,
|
wantErr: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Error - Search by name not found after all pages",
|
description: "Error - Search by name not found after all pages",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantDbName: "non-existent",
|
wantDbName: "non-existent",
|
||||||
wantErr: true,
|
wantErr: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
description: "Error - Search by id not found after all pages",
|
description: "Error - Search by id not found after all pages",
|
||||||
projectId: "pid", region: "reg", instanceId: "inst",
|
projectID: "pid", region: "reg", instanceID: "inst",
|
||||||
wantDbId: 999999,
|
wantDbID: 999999,
|
||||||
wantErr: true,
|
wantErr: true,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -133,47 +105,46 @@ func TestGetDatabase(t *testing.T) {
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(
|
||||||
tt.description, func(t *testing.T) {
|
tt.description, func(t *testing.T) {
|
||||||
var currentPage int64
|
var currentPage int32
|
||||||
client := &mockDBClient{
|
|
||||||
executeRequest: func() postgresflex.ApiListDatabasesRequestRequest {
|
mockCall := func(_ v3alpha1api.ApiListDatabasesRequestRequest) (*v3alpha1api.ListDatabasesResponse, error) {
|
||||||
return &mockRequest{
|
currentPage++
|
||||||
executeFunc: func() (*postgresflex.ListDatabasesResponse, error) {
|
return mockResp(currentPage)
|
||||||
currentPage++
|
|
||||||
return mockResp(currentPage)
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var actual *postgresflex.ListDatabase
|
client := &v3alpha1api.DefaultAPIServiceMock{
|
||||||
|
ListDatabasesRequestExecuteMock: &mockCall,
|
||||||
|
}
|
||||||
|
|
||||||
|
var actual *v3alpha1api.ListDatabase
|
||||||
var errDB error
|
var errDB error
|
||||||
|
|
||||||
if tt.wantDbName != "" {
|
if tt.wantDbName != "" {
|
||||||
actual, errDB = getDatabaseByName(
|
actual, errDB = getDatabaseByName(
|
||||||
t.Context(),
|
t.Context(),
|
||||||
client,
|
client,
|
||||||
tt.projectId,
|
tt.projectID,
|
||||||
tt.region,
|
tt.region,
|
||||||
tt.instanceId,
|
tt.instanceID,
|
||||||
tt.wantDbName,
|
tt.wantDbName,
|
||||||
)
|
)
|
||||||
} else if tt.wantDbId != 0 {
|
} else if tt.wantDbID != 0 {
|
||||||
actual, errDB = getDatabaseById(
|
actual, errDB = getDatabaseById(
|
||||||
t.Context(),
|
t.Context(),
|
||||||
client,
|
client,
|
||||||
tt.projectId,
|
tt.projectID,
|
||||||
tt.region,
|
tt.region,
|
||||||
tt.instanceId,
|
tt.instanceID,
|
||||||
tt.wantDbId,
|
int64(tt.wantDbID),
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
actual, errDB = getDatabase(
|
actual, errDB = getDatabase(
|
||||||
context.Background(),
|
context.Background(),
|
||||||
client,
|
client,
|
||||||
tt.projectId,
|
tt.projectID,
|
||||||
tt.region,
|
tt.region,
|
||||||
tt.instanceId,
|
tt.instanceID,
|
||||||
func(_ postgresflex.ListDatabase) bool { return false },
|
func(_ v3alpha1api.ListDatabase) bool { return false },
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -182,14 +153,14 @@ func TestGetDatabase(t *testing.T) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !tt.wantErr && tt.wantDbName != "" && actual != nil {
|
if !tt.wantErr && tt.wantDbName != "" && actual != nil {
|
||||||
if *actual.Name != tt.wantDbName {
|
if actual.Name != tt.wantDbName {
|
||||||
t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", *actual.Name, tt.wantDbName)
|
t.Errorf("getDatabaseByNameOrID() got name = %v, want %v", actual.Name, tt.wantDbName)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if !tt.wantErr && tt.wantDbId != 0 && actual != nil {
|
if !tt.wantErr && tt.wantDbID != 0 && actual != nil {
|
||||||
if *actual.Id != tt.wantDbId {
|
if actual.Id != tt.wantDbID {
|
||||||
t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", *actual.Id, tt.wantDbId)
|
t.Errorf("getDatabaseByNameOrID() got id = %v, want %v", actual.Id, tt.wantDbID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
@ -200,23 +171,18 @@ func TestGetDatabase(t *testing.T) {
|
||||||
func TestCleanString(t *testing.T) {
|
func TestCleanString(t *testing.T) {
|
||||||
testcases := []struct {
|
testcases := []struct {
|
||||||
name string
|
name string
|
||||||
given *string
|
given string
|
||||||
expected *string
|
expected string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "should remove quotes",
|
name: "should remove quotes",
|
||||||
given: utils.Ptr("\"quoted\""),
|
given: "\"quoted\"",
|
||||||
expected: utils.Ptr("quoted"),
|
expected: "quoted",
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "should handle nil",
|
|
||||||
given: nil,
|
|
||||||
expected: nil,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "should not change unquoted string",
|
name: "should not change unquoted string",
|
||||||
given: utils.Ptr("unquoted"),
|
given: "unquoted",
|
||||||
expected: utils.Ptr("unquoted"),
|
expected: "unquoted",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,21 +5,21 @@ import (
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
|
// mapFields maps fields from a ListDatabase API response to a resourceModel for the data source.
|
||||||
func mapFields(
|
func mapFields(
|
||||||
source *postgresflexalpha.ListDatabase,
|
source *v3alpha1api.ListDatabase,
|
||||||
model *dataSourceModel,
|
model *dataSourceModel,
|
||||||
region string,
|
region string,
|
||||||
) error {
|
) error {
|
||||||
if source == nil {
|
if source == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
if source.Id == nil || *source.Id == 0 {
|
if source.Id == 0 {
|
||||||
return fmt.Errorf("id not present")
|
return fmt.Errorf("id not present")
|
||||||
}
|
}
|
||||||
if model == nil {
|
if model == nil {
|
||||||
|
|
@ -29,8 +29,8 @@ func mapFields(
|
||||||
var databaseId int64
|
var databaseId int64
|
||||||
if model.DatabaseId.ValueInt64() != 0 {
|
if model.DatabaseId.ValueInt64() != 0 {
|
||||||
databaseId = model.DatabaseId.ValueInt64()
|
databaseId = model.DatabaseId.ValueInt64()
|
||||||
} else if source.Id != nil {
|
} else if source.Id != 0 {
|
||||||
databaseId = *source.Id
|
databaseId = int64(source.Id)
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
@ -38,7 +38,7 @@ func mapFields(
|
||||||
model.Id = types.Int64Value(databaseId)
|
model.Id = types.Int64Value(databaseId)
|
||||||
model.DatabaseId = types.Int64Value(databaseId)
|
model.DatabaseId = types.Int64Value(databaseId)
|
||||||
model.Name = types.StringValue(source.GetName())
|
model.Name = types.StringValue(source.GetName())
|
||||||
model.Owner = types.StringPointerValue(cleanString(source.Owner))
|
model.Owner = types.StringValue(cleanString(source.Owner))
|
||||||
model.Region = types.StringValue(region)
|
model.Region = types.StringValue(region)
|
||||||
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
||||||
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
||||||
|
|
@ -53,11 +53,11 @@ func mapFields(
|
||||||
}
|
}
|
||||||
|
|
||||||
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
|
// mapResourceFields maps fields from a GetDatabase API response to a resourceModel for the resource.
|
||||||
func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *resourceModel) error {
|
func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceModel) error {
|
||||||
if source == nil {
|
if source == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
if source.Id == nil || *source.Id == 0 {
|
if source.Id == 0 {
|
||||||
return fmt.Errorf("id not present")
|
return fmt.Errorf("id not present")
|
||||||
}
|
}
|
||||||
if model == nil {
|
if model == nil {
|
||||||
|
|
@ -67,8 +67,8 @@ func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *res
|
||||||
var databaseId int64
|
var databaseId int64
|
||||||
if model.Id.ValueInt64() != 0 {
|
if model.Id.ValueInt64() != 0 {
|
||||||
databaseId = model.Id.ValueInt64()
|
databaseId = model.Id.ValueInt64()
|
||||||
} else if source.Id != nil {
|
} else if source.Id != 0 {
|
||||||
databaseId = *source.Id
|
databaseId = int64(source.Id)
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
@ -76,18 +76,18 @@ func mapResourceFields(source *postgresflexalpha.GetDatabaseResponse, model *res
|
||||||
model.Id = types.Int64Value(databaseId)
|
model.Id = types.Int64Value(databaseId)
|
||||||
model.DatabaseId = types.Int64Value(databaseId)
|
model.DatabaseId = types.Int64Value(databaseId)
|
||||||
model.Name = types.StringValue(source.GetName())
|
model.Name = types.StringValue(source.GetName())
|
||||||
model.Owner = types.StringPointerValue(cleanString(source.Owner))
|
model.Owner = types.StringValue(cleanString(source.Owner))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// toCreatePayload converts the resource model to an API create payload.
|
// toCreatePayload converts the resource model to an API create payload.
|
||||||
func toCreatePayload(model *resourceModel) (*postgresflexalpha.CreateDatabaseRequestPayload, error) {
|
func toCreatePayload(model *resourceModel) (*v3alpha1api.CreateDatabaseRequestPayload, error) {
|
||||||
if model == nil {
|
if model == nil {
|
||||||
return nil, fmt.Errorf("nil model")
|
return nil, fmt.Errorf("nil model")
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflexalpha.CreateDatabaseRequestPayload{
|
return &v3alpha1api.CreateDatabaseRequestPayload{
|
||||||
Name: model.Name.ValueStringPointer(),
|
Name: model.Name.ValueString(),
|
||||||
Owner: model.Owner.ValueStringPointer(),
|
Owner: model.Owner.ValueStringPointer(),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,8 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
postgresflexalpha "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/datasources_gen"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -31,9 +32,9 @@ func TestMapFields(t *testing.T) {
|
||||||
name: "should map fields correctly",
|
name: "should map fields correctly",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{
|
source: &postgresflexalpha.ListDatabase{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: "my-db",
|
||||||
Owner: utils.Ptr("\"my-owner\""),
|
Owner: "my-owner",
|
||||||
},
|
},
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
|
|
@ -62,8 +63,8 @@ func TestMapFields(t *testing.T) {
|
||||||
name: "should preserve existing model ID",
|
name: "should preserve existing model ID",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{
|
source: &postgresflexalpha.ListDatabase{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: "my-db",
|
||||||
},
|
},
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
|
|
@ -77,9 +78,10 @@ func TestMapFields(t *testing.T) {
|
||||||
expected: expected{
|
expected: expected{
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.Int64Value(1),
|
||||||
Name: types.StringValue("my-db"),
|
Name: types.StringValue("my-db"),
|
||||||
Owner: types.StringNull(), DatabaseId: types.Int64Value(1),
|
Owner: types.StringValue(""),
|
||||||
|
DatabaseId: types.Int64Value(1),
|
||||||
Region: types.StringValue("eu01"),
|
Region: types.StringValue("eu01"),
|
||||||
InstanceId: types.StringValue("my-instance"),
|
InstanceId: types.StringValue("my-instance"),
|
||||||
ProjectId: types.StringValue("my-project"),
|
ProjectId: types.StringValue("my-project"),
|
||||||
|
|
@ -99,7 +101,7 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
name: "should fail on nil source ID",
|
name: "should fail on nil source ID",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{Id: nil},
|
source: &postgresflexalpha.ListDatabase{Id: 0},
|
||||||
model: &dataSourceModel{},
|
model: &dataSourceModel{},
|
||||||
},
|
},
|
||||||
expected: expected{err: true},
|
expected: expected{err: true},
|
||||||
|
|
@ -107,7 +109,7 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
name: "should fail on nil model",
|
name: "should fail on nil model",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.ListDatabase{Id: utils.Ptr(int64(1))},
|
source: &postgresflexalpha.ListDatabase{Id: int32(1)},
|
||||||
model: nil,
|
model: nil,
|
||||||
},
|
},
|
||||||
expected: expected{err: true},
|
expected: expected{err: true},
|
||||||
|
|
@ -150,9 +152,9 @@ func TestMapResourceFields(t *testing.T) {
|
||||||
name: "should map fields correctly",
|
name: "should map fields correctly",
|
||||||
given: given{
|
given: given{
|
||||||
source: &postgresflexalpha.GetDatabaseResponse{
|
source: &postgresflexalpha.GetDatabaseResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: "my-db",
|
||||||
Owner: utils.Ptr("my-owner"),
|
Owner: "my-owner",
|
||||||
},
|
},
|
||||||
model: &resourceModel{},
|
model: &resourceModel{},
|
||||||
},
|
},
|
||||||
|
|
@ -216,7 +218,7 @@ func TestToCreatePayload(t *testing.T) {
|
||||||
},
|
},
|
||||||
expected: expected{
|
expected: expected{
|
||||||
payload: &postgresflexalpha.CreateDatabaseRequestPayload{
|
payload: &postgresflexalpha.CreateDatabaseRequestPayload{
|
||||||
Name: utils.Ptr("my-db"),
|
Name: "my-db",
|
||||||
Owner: utils.Ptr("my-owner"),
|
Owner: utils.Ptr("my-owner"),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -14,14 +14,14 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
|
postgresflexalphaResGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/database/resources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
postgresflexalpha3 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
@ -43,7 +43,7 @@ func NewDatabaseResource() resource.Resource {
|
||||||
}
|
}
|
||||||
|
|
||||||
// resourceModel describes the resource data model.
|
// resourceModel describes the resource data model.
|
||||||
type resourceModel = postgresflexalpha2.DatabaseModel
|
type resourceModel = postgresflexalphaResGen.DatabaseModel
|
||||||
|
|
||||||
// DatabaseResourceIdentityModel describes the resource's identity attributes.
|
// DatabaseResourceIdentityModel describes the resource's identity attributes.
|
||||||
type DatabaseResourceIdentityModel struct {
|
type DatabaseResourceIdentityModel struct {
|
||||||
|
|
@ -55,7 +55,7 @@ type DatabaseResourceIdentityModel struct {
|
||||||
|
|
||||||
// databaseResource is the resource implementation.
|
// databaseResource is the resource implementation.
|
||||||
type databaseResource struct {
|
type databaseResource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -122,7 +122,7 @@ var modifiersFileByte []byte
|
||||||
|
|
||||||
// Schema defines the schema for the resource.
|
// Schema defines the schema for the resource.
|
||||||
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
s := postgresflexalpha2.DatabaseResourceSchema(ctx)
|
s := postgresflexalphaResGen.DatabaseResourceSchema(ctx)
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -198,7 +198,7 @@ func (r *databaseResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// Create new database
|
// Create new database
|
||||||
databaseResp, err := r.client.CreateDatabaseRequest(
|
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectId,
|
||||||
region,
|
region,
|
||||||
|
|
@ -209,16 +209,17 @@ func (r *databaseResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if databaseResp == nil || databaseResp.Id == nil {
|
dbID, ok := databaseResp.GetIdOk()
|
||||||
|
if !ok {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
funcErrorSummary,
|
funcErrorSummary,
|
||||||
"API didn't return database Id. A database might have been created",
|
"API didn't return database Id. A database might although have been created",
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
databaseId := *databaseResp.Id
|
databaseId := int64(*dbID)
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
|
|
@ -234,7 +235,7 @@ func (r *databaseResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
database, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
|
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(15 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(15 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
|
@ -293,7 +294,7 @@ func (r *databaseResource) Read(
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
|
|
||||||
databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId).
|
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(15 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(15 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
|
@ -321,13 +322,12 @@ func (r *databaseResource) Read(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: use values from api to identify drift
|
|
||||||
// Save identity into Terraform state
|
// Save identity into Terraform state
|
||||||
identity := DatabaseResourceIdentityModel{
|
identity := DatabaseResourceIdentityModel{
|
||||||
ProjectID: types.StringValue(projectId),
|
ProjectID: types.StringValue(projectId),
|
||||||
Region: types.StringValue(region),
|
Region: types.StringValue(region),
|
||||||
InstanceID: types.StringValue(instanceId),
|
InstanceID: types.StringValue(instanceId),
|
||||||
DatabaseID: types.Int64Value(databaseId),
|
DatabaseID: types.Int64Value(int64(databaseResp.GetId())),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -361,13 +361,7 @@ func (r *databaseResource) Update(
|
||||||
projectId := model.ProjectId.ValueString()
|
projectId := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceId := model.InstanceId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
databaseId64 := model.DatabaseId.ValueInt64()
|
databaseId := model.DatabaseId.ValueInt64()
|
||||||
|
|
||||||
if databaseId64 > math.MaxInt32 {
|
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
databaseId := int32(databaseId64) // nolint:gosec // check is performed above
|
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
||||||
|
|
@ -383,7 +377,7 @@ func (r *databaseResource) Update(
|
||||||
}
|
}
|
||||||
|
|
||||||
modified := false
|
modified := false
|
||||||
var payload postgresflexalpha.UpdateDatabasePartiallyRequestPayload
|
var payload v3alpha1api.UpdateDatabasePartiallyRequestPayload
|
||||||
if stateModel.Name != model.Name {
|
if stateModel.Name != model.Name {
|
||||||
payload.Name = model.Name.ValueStringPointer()
|
payload.Name = model.Name.ValueStringPointer()
|
||||||
modified = true
|
modified = true
|
||||||
|
|
@ -399,13 +393,18 @@ func (r *databaseResource) Update(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if databaseId > math.MaxInt32 {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", "databaseID out of bounds for int32")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
databaseID32 := int32(databaseId) //nolint:gosec // TODO
|
||||||
// Update existing database
|
// Update existing database
|
||||||
err := r.client.UpdateDatabasePartiallyRequest(
|
err := r.client.DefaultAPI.UpdateDatabasePartiallyRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectId,
|
||||||
region,
|
region,
|
||||||
instanceId,
|
instanceId,
|
||||||
databaseId,
|
databaseID32,
|
||||||
).UpdateDatabasePartiallyRequestPayload(payload).Execute()
|
).UpdateDatabasePartiallyRequestPayload(payload).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
|
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
|
||||||
|
|
@ -414,7 +413,7 @@ func (r *databaseResource) Update(
|
||||||
|
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
databaseResp, err := postgresflexalpha3.GetDatabaseByIdWaitHandler(ctx, r.client, projectId, instanceId, region, databaseId64).
|
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(15 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(15 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
|
@ -442,7 +441,7 @@ func (r *databaseResource) Update(
|
||||||
ProjectID: types.StringValue(projectId),
|
ProjectID: types.StringValue(projectId),
|
||||||
Region: types.StringValue(region),
|
Region: types.StringValue(region),
|
||||||
InstanceID: types.StringValue(instanceId),
|
InstanceID: types.StringValue(instanceId),
|
||||||
DatabaseID: types.Int64Value(databaseId64),
|
DatabaseID: types.Int64Value(databaseId),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -500,7 +499,7 @@ func (r *databaseResource) Delete(
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
||||||
|
|
||||||
// Delete existing record set
|
// Delete existing record set
|
||||||
err := r.client.DeleteDatabaseRequestExecute(ctx, projectId, region, instanceId, databaseId)
|
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
package postgresFlexAlphaFlavor
|
package postgresflexalphaflavor
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
@ -8,8 +8,8 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
|
|
@ -30,13 +30,13 @@ type FlavorModel struct {
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
StorageClass types.String `tfsdk:"storage_class"`
|
StorageClass types.String `tfsdk:"storage_class"`
|
||||||
Cpu types.Int64 `tfsdk:"cpu"`
|
Cpu types.Int32 `tfsdk:"cpu"`
|
||||||
Description types.String `tfsdk:"description"`
|
Description types.String `tfsdk:"description"`
|
||||||
Id types.String `tfsdk:"id"`
|
Id types.String `tfsdk:"id"`
|
||||||
FlavorId types.String `tfsdk:"flavor_id"`
|
FlavorId types.String `tfsdk:"flavor_id"`
|
||||||
MaxGb types.Int64 `tfsdk:"max_gb"`
|
MaxGb types.Int32 `tfsdk:"max_gb"`
|
||||||
Memory types.Int64 `tfsdk:"ram"`
|
Memory types.Int32 `tfsdk:"ram"`
|
||||||
MinGb types.Int64 `tfsdk:"min_gb"`
|
MinGb types.Int32 `tfsdk:"min_gb"`
|
||||||
NodeType types.String `tfsdk:"node_type"`
|
NodeType types.String `tfsdk:"node_type"`
|
||||||
StorageClasses types.List `tfsdk:"storage_classes"`
|
StorageClasses types.List `tfsdk:"storage_classes"`
|
||||||
}
|
}
|
||||||
|
|
@ -48,7 +48,7 @@ func NewFlavorDataSource() datasource.DataSource {
|
||||||
|
|
||||||
// flavorDataSource is the data source implementation.
|
// flavorDataSource is the data source implementation.
|
||||||
type flavorDataSource struct {
|
type flavorDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -86,12 +86,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
|
||||||
Description: "The flavor description.",
|
Description: "The flavor description.",
|
||||||
MarkdownDescription: "The flavor description.",
|
MarkdownDescription: "The flavor description.",
|
||||||
},
|
},
|
||||||
"cpu": schema.Int64Attribute{
|
"cpu": schema.Int32Attribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The cpu count of the instance.",
|
Description: "The cpu count of the instance.",
|
||||||
MarkdownDescription: "The cpu count of the instance.",
|
MarkdownDescription: "The cpu count of the instance.",
|
||||||
},
|
},
|
||||||
"ram": schema.Int64Attribute{
|
"ram": schema.Int32Attribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The memory of the instance in Gibibyte.",
|
Description: "The memory of the instance in Gibibyte.",
|
||||||
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
||||||
|
|
@ -116,12 +116,12 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
|
||||||
Description: "The flavor id of the instance flavor.",
|
Description: "The flavor id of the instance flavor.",
|
||||||
MarkdownDescription: "The flavor id of the instance flavor.",
|
MarkdownDescription: "The flavor id of the instance flavor.",
|
||||||
},
|
},
|
||||||
"max_gb": schema.Int64Attribute{
|
"max_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
},
|
},
|
||||||
"min_gb": schema.Int64Attribute{
|
"min_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "minimum storage which is required to order in Gigabyte.",
|
Description: "minimum storage which is required to order in Gigabyte.",
|
||||||
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
||||||
|
|
@ -138,10 +138,10 @@ func (r *flavorDataSource) Schema(ctx context.Context, _ datasource.SchemaReques
|
||||||
"class": schema.StringAttribute{
|
"class": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_io_per_sec": schema.Int64Attribute{
|
"max_io_per_sec": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_through_in_mb": schema.Int64Attribute{
|
"max_through_in_mb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -171,25 +171,25 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
flavors, err := getAllFlavors(ctx, r.client, projectId, region)
|
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectId, region)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
var foundFlavors []postgresflexalpha.ListFlavors
|
var foundFlavors []v3alpha1api.ListFlavors
|
||||||
for _, flavor := range flavors {
|
for _, flavor := range flavors {
|
||||||
if model.Cpu.ValueInt64() != *flavor.Cpu {
|
if model.Cpu.ValueInt32() != flavor.Cpu {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if model.Memory.ValueInt64() != *flavor.Memory {
|
if model.Memory.ValueInt32() != flavor.Memory {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if model.NodeType.ValueString() != *flavor.NodeType {
|
if model.NodeType.ValueString() != flavor.NodeType {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
for _, sc := range *flavor.StorageClasses {
|
for _, sc := range flavor.StorageClasses {
|
||||||
if model.StorageClass.ValueString() != *sc.Class {
|
if model.StorageClass.ValueString() != sc.Class {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
foundFlavors = append(foundFlavors, flavor)
|
foundFlavors = append(foundFlavors, flavor)
|
||||||
|
|
@ -205,11 +205,11 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
}
|
}
|
||||||
|
|
||||||
f := foundFlavors[0]
|
f := foundFlavors[0]
|
||||||
model.Description = types.StringValue(*f.Description)
|
model.Description = types.StringValue(f.Description)
|
||||||
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, *f.Id)
|
model.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
|
||||||
model.FlavorId = types.StringValue(*f.Id)
|
model.FlavorId = types.StringValue(f.Id)
|
||||||
model.MaxGb = types.Int64Value(*f.MaxGB)
|
model.MaxGb = types.Int32Value(f.MaxGB)
|
||||||
model.MinGb = types.Int64Value(*f.MinGB)
|
model.MinGb = types.Int32Value(f.MinGB)
|
||||||
|
|
||||||
if f.StorageClasses == nil {
|
if f.StorageClasses == nil {
|
||||||
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
|
model.StorageClasses = types.ListNull(postgresflexalphaGen.StorageClassesType{
|
||||||
|
|
@ -219,15 +219,15 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
var scList []attr.Value
|
var scList []attr.Value
|
||||||
for _, sc := range *f.StorageClasses {
|
for _, sc := range f.StorageClasses {
|
||||||
scList = append(
|
scList = append(
|
||||||
scList,
|
scList,
|
||||||
postgresflexalphaGen.NewStorageClassesValueMust(
|
postgresflexalphaGen.NewStorageClassesValueMust(
|
||||||
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
|
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"class": types.StringValue(*sc.Class),
|
"class": types.StringValue(sc.Class),
|
||||||
"max_io_per_sec": types.Int64Value(*sc.MaxIoPerSec),
|
"max_io_per_sec": types.Int32Value(sc.MaxIoPerSec),
|
||||||
"max_through_in_mb": types.Int64Value(*sc.MaxThroughInMb),
|
"max_through_in_mb": types.Int32Value(sc.MaxThroughInMb),
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"flavors": schema.ListNestedAttribute{
|
"flavors": schema.ListNestedAttribute{
|
||||||
NestedObject: schema.NestedAttributeObject{
|
NestedObject: schema.NestedAttributeObject{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"cpu": schema.Int64Attribute{
|
"cpu": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The cpu count of the instance.",
|
Description: "The cpu count of the instance.",
|
||||||
MarkdownDescription: "The cpu count of the instance.",
|
MarkdownDescription: "The cpu count of the instance.",
|
||||||
|
|
@ -38,17 +38,17 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "The id of the instance flavor.",
|
Description: "The id of the instance flavor.",
|
||||||
MarkdownDescription: "The id of the instance flavor.",
|
MarkdownDescription: "The id of the instance flavor.",
|
||||||
},
|
},
|
||||||
"max_gb": schema.Int64Attribute{
|
"max_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
Description: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
MarkdownDescription: "maximum storage which can be ordered for the flavor in Gigabyte.",
|
||||||
},
|
},
|
||||||
"memory": schema.Int64Attribute{
|
"memory": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The memory of the instance in Gibibyte.",
|
Description: "The memory of the instance in Gibibyte.",
|
||||||
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
MarkdownDescription: "The memory of the instance in Gibibyte.",
|
||||||
},
|
},
|
||||||
"min_gb": schema.Int64Attribute{
|
"min_gb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "minimum storage which is required to order in Gigabyte.",
|
Description: "minimum storage which is required to order in Gigabyte.",
|
||||||
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
MarkdownDescription: "minimum storage which is required to order in Gigabyte.",
|
||||||
|
|
@ -64,10 +64,10 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
"class": schema.StringAttribute{
|
"class": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_io_per_sec": schema.Int64Attribute{
|
"max_io_per_sec": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"max_through_in_mb": schema.Int64Attribute{
|
"max_through_in_mb": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -92,7 +92,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "List of flavors available for the project.",
|
Description: "List of flavors available for the project.",
|
||||||
MarkdownDescription: "List of flavors available for the project.",
|
MarkdownDescription: "List of flavors available for the project.",
|
||||||
},
|
},
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of the page of items list to be returned.",
|
Description: "Number of the page of items list to be returned.",
|
||||||
|
|
@ -100,19 +100,19 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"pagination": schema.SingleNestedAttribute{
|
"pagination": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"page": schema.Int64Attribute{
|
"page": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"sort": schema.StringAttribute{
|
"sort": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_pages": schema.Int64Attribute{
|
"total_pages": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
"total_rows": schema.Int64Attribute{
|
"total_rows": schema.Int32Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -138,7 +138,7 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"size": schema.Int64Attribute{
|
"size": schema.Int32Attribute{
|
||||||
Optional: true,
|
Optional: true,
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "Number of items to be returned on each page.",
|
Description: "Number of items to be returned on each page.",
|
||||||
|
|
@ -178,11 +178,11 @@ func FlavorsDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
|
|
||||||
type FlavorsModel struct {
|
type FlavorsModel struct {
|
||||||
Flavors types.List `tfsdk:"flavors"`
|
Flavors types.List `tfsdk:"flavors"`
|
||||||
Page types.Int64 `tfsdk:"page"`
|
Page types.Int32 `tfsdk:"page"`
|
||||||
Pagination PaginationValue `tfsdk:"pagination"`
|
Pagination PaginationValue `tfsdk:"pagination"`
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Region types.String `tfsdk:"region"`
|
Region types.String `tfsdk:"region"`
|
||||||
Size types.Int64 `tfsdk:"size"`
|
Size types.Int32 `tfsdk:"size"`
|
||||||
Sort types.String `tfsdk:"sort"`
|
Sort types.String `tfsdk:"sort"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -221,12 +221,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
|
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
|
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
descriptionAttribute, ok := attributes["description"]
|
descriptionAttribute, ok := attributes["description"]
|
||||||
|
|
@ -275,12 +275,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
|
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
|
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryAttribute, ok := attributes["memory"]
|
memoryAttribute, ok := attributes["memory"]
|
||||||
|
|
@ -293,12 +293,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
|
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
|
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbAttribute, ok := attributes["min_gb"]
|
minGbAttribute, ok := attributes["min_gb"]
|
||||||
|
|
@ -311,12 +311,12 @@ func (t FlavorsType) ValueFromObject(ctx context.Context, in basetypes.ObjectVal
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
|
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
|
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeTypeAttribute, ok := attributes["node_type"]
|
nodeTypeAttribute, ok := attributes["node_type"]
|
||||||
|
|
@ -445,12 +445,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
cpuVal, ok := cpuAttribute.(basetypes.Int64Value)
|
cpuVal, ok := cpuAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`cpu expected to be basetypes.Int64Value, was: %T`, cpuAttribute))
|
fmt.Sprintf(`cpu expected to be basetypes.Int32Value, was: %T`, cpuAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
descriptionAttribute, ok := attributes["description"]
|
descriptionAttribute, ok := attributes["description"]
|
||||||
|
|
@ -499,12 +499,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxGbVal, ok := maxGbAttribute.(basetypes.Int64Value)
|
maxGbVal, ok := maxGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_gb expected to be basetypes.Int64Value, was: %T`, maxGbAttribute))
|
fmt.Sprintf(`max_gb expected to be basetypes.Int32Value, was: %T`, maxGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryAttribute, ok := attributes["memory"]
|
memoryAttribute, ok := attributes["memory"]
|
||||||
|
|
@ -517,12 +517,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
memoryVal, ok := memoryAttribute.(basetypes.Int64Value)
|
memoryVal, ok := memoryAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`memory expected to be basetypes.Int64Value, was: %T`, memoryAttribute))
|
fmt.Sprintf(`memory expected to be basetypes.Int32Value, was: %T`, memoryAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbAttribute, ok := attributes["min_gb"]
|
minGbAttribute, ok := attributes["min_gb"]
|
||||||
|
|
@ -535,12 +535,12 @@ func NewFlavorsValue(attributeTypes map[string]attr.Type, attributes map[string]
|
||||||
return NewFlavorsValueUnknown(), diags
|
return NewFlavorsValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
minGbVal, ok := minGbAttribute.(basetypes.Int64Value)
|
minGbVal, ok := minGbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`min_gb expected to be basetypes.Int64Value, was: %T`, minGbAttribute))
|
fmt.Sprintf(`min_gb expected to be basetypes.Int32Value, was: %T`, minGbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
nodeTypeAttribute, ok := attributes["node_type"]
|
nodeTypeAttribute, ok := attributes["node_type"]
|
||||||
|
|
@ -664,12 +664,12 @@ func (t FlavorsType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = FlavorsValue{}
|
var _ basetypes.ObjectValuable = FlavorsValue{}
|
||||||
|
|
||||||
type FlavorsValue struct {
|
type FlavorsValue struct {
|
||||||
Cpu basetypes.Int64Value `tfsdk:"cpu"`
|
Cpu basetypes.Int32Value `tfsdk:"cpu"`
|
||||||
Description basetypes.StringValue `tfsdk:"description"`
|
Description basetypes.StringValue `tfsdk:"description"`
|
||||||
Id basetypes.StringValue `tfsdk:"id"`
|
Id basetypes.StringValue `tfsdk:"id"`
|
||||||
MaxGb basetypes.Int64Value `tfsdk:"max_gb"`
|
MaxGb basetypes.Int32Value `tfsdk:"max_gb"`
|
||||||
Memory basetypes.Int64Value `tfsdk:"memory"`
|
Memory basetypes.Int32Value `tfsdk:"memory"`
|
||||||
MinGb basetypes.Int64Value `tfsdk:"min_gb"`
|
MinGb basetypes.Int32Value `tfsdk:"min_gb"`
|
||||||
NodeType basetypes.StringValue `tfsdk:"node_type"`
|
NodeType basetypes.StringValue `tfsdk:"node_type"`
|
||||||
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
|
StorageClasses basetypes.ListValue `tfsdk:"storage_classes"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
|
|
@ -681,12 +681,12 @@ func (v FlavorsValue) ToTerraformValue(ctx context.Context) (tftypes.Value, erro
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["cpu"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["cpu"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["description"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["id"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["max_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["memory"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["memory"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["min_gb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["min_gb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["node_type"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["storage_classes"] = basetypes.ListType{
|
attrTypes["storage_classes"] = basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -821,12 +821,12 @@ func (v FlavorsValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue,
|
||||||
}
|
}
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"cpu": basetypes.Int64Type{},
|
"cpu": basetypes.Int32Type{},
|
||||||
"description": basetypes.StringType{},
|
"description": basetypes.StringType{},
|
||||||
"id": basetypes.StringType{},
|
"id": basetypes.StringType{},
|
||||||
"max_gb": basetypes.Int64Type{},
|
"max_gb": basetypes.Int32Type{},
|
||||||
"memory": basetypes.Int64Type{},
|
"memory": basetypes.Int32Type{},
|
||||||
"min_gb": basetypes.Int64Type{},
|
"min_gb": basetypes.Int32Type{},
|
||||||
"node_type": basetypes.StringType{},
|
"node_type": basetypes.StringType{},
|
||||||
"storage_classes": basetypes.ListType{
|
"storage_classes": basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -917,12 +917,12 @@ func (v FlavorsValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v FlavorsValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"cpu": basetypes.Int64Type{},
|
"cpu": basetypes.Int32Type{},
|
||||||
"description": basetypes.StringType{},
|
"description": basetypes.StringType{},
|
||||||
"id": basetypes.StringType{},
|
"id": basetypes.StringType{},
|
||||||
"max_gb": basetypes.Int64Type{},
|
"max_gb": basetypes.Int32Type{},
|
||||||
"memory": basetypes.Int64Type{},
|
"memory": basetypes.Int32Type{},
|
||||||
"min_gb": basetypes.Int64Type{},
|
"min_gb": basetypes.Int32Type{},
|
||||||
"node_type": basetypes.StringType{},
|
"node_type": basetypes.StringType{},
|
||||||
"storage_classes": basetypes.ListType{
|
"storage_classes": basetypes.ListType{
|
||||||
ElemType: StorageClassesValue{}.Type(ctx),
|
ElemType: StorageClassesValue{}.Type(ctx),
|
||||||
|
|
@ -983,12 +983,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
|
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
|
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
||||||
|
|
@ -1001,12 +1001,12 @@ func (t StorageClassesType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
|
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
|
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1112,12 +1112,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
return NewStorageClassesValueUnknown(), diags
|
return NewStorageClassesValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int64Value)
|
maxIoPerSecVal, ok := maxIoPerSecAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int64Value, was: %T`, maxIoPerSecAttribute))
|
fmt.Sprintf(`max_io_per_sec expected to be basetypes.Int32Value, was: %T`, maxIoPerSecAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
maxThroughInMbAttribute, ok := attributes["max_through_in_mb"]
|
||||||
|
|
@ -1130,12 +1130,12 @@ func NewStorageClassesValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
return NewStorageClassesValueUnknown(), diags
|
return NewStorageClassesValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int64Value)
|
maxThroughInMbVal, ok := maxThroughInMbAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int64Value, was: %T`, maxThroughInMbAttribute))
|
fmt.Sprintf(`max_through_in_mb expected to be basetypes.Int32Value, was: %T`, maxThroughInMbAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1219,8 +1219,8 @@ var _ basetypes.ObjectValuable = StorageClassesValue{}
|
||||||
|
|
||||||
type StorageClassesValue struct {
|
type StorageClassesValue struct {
|
||||||
Class basetypes.StringValue `tfsdk:"class"`
|
Class basetypes.StringValue `tfsdk:"class"`
|
||||||
MaxIoPerSec basetypes.Int64Value `tfsdk:"max_io_per_sec"`
|
MaxIoPerSec basetypes.Int32Value `tfsdk:"max_io_per_sec"`
|
||||||
MaxThroughInMb basetypes.Int64Value `tfsdk:"max_through_in_mb"`
|
MaxThroughInMb basetypes.Int32Value `tfsdk:"max_through_in_mb"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1231,8 +1231,8 @@ func (v StorageClassesValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["class"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["max_io_per_sec"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_io_per_sec"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["max_through_in_mb"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["max_through_in_mb"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||||
|
|
||||||
|
|
@ -1295,8 +1295,8 @@ func (v StorageClassesValue) ToObjectValue(ctx context.Context) (basetypes.Objec
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"class": basetypes.StringType{},
|
"class": basetypes.StringType{},
|
||||||
"max_io_per_sec": basetypes.Int64Type{},
|
"max_io_per_sec": basetypes.Int32Type{},
|
||||||
"max_through_in_mb": basetypes.Int64Type{},
|
"max_through_in_mb": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.IsNull() {
|
if v.IsNull() {
|
||||||
|
|
@ -1359,8 +1359,8 @@ func (v StorageClassesValue) Type(ctx context.Context) attr.Type {
|
||||||
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v StorageClassesValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"class": basetypes.StringType{},
|
"class": basetypes.StringType{},
|
||||||
"max_io_per_sec": basetypes.Int64Type{},
|
"max_io_per_sec": basetypes.Int32Type{},
|
||||||
"max_through_in_mb": basetypes.Int64Type{},
|
"max_through_in_mb": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1399,12 +1399,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
pageVal, ok := pageAttribute.(basetypes.Int64Value)
|
pageVal, ok := pageAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
|
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeAttribute, ok := attributes["size"]
|
sizeAttribute, ok := attributes["size"]
|
||||||
|
|
@ -1417,12 +1417,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
|
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
|
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sortAttribute, ok := attributes["sort"]
|
sortAttribute, ok := attributes["sort"]
|
||||||
|
|
@ -1453,12 +1453,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
|
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
|
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsAttribute, ok := attributes["total_rows"]
|
totalRowsAttribute, ok := attributes["total_rows"]
|
||||||
|
|
@ -1471,12 +1471,12 @@ func (t PaginationType) ValueFromObject(ctx context.Context, in basetypes.Object
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
|
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
|
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1566,12 +1566,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
pageVal, ok := pageAttribute.(basetypes.Int64Value)
|
pageVal, ok := pageAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`page expected to be basetypes.Int64Value, was: %T`, pageAttribute))
|
fmt.Sprintf(`page expected to be basetypes.Int32Value, was: %T`, pageAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeAttribute, ok := attributes["size"]
|
sizeAttribute, ok := attributes["size"]
|
||||||
|
|
@ -1584,12 +1584,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
sizeVal, ok := sizeAttribute.(basetypes.Int64Value)
|
sizeVal, ok := sizeAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`size expected to be basetypes.Int64Value, was: %T`, sizeAttribute))
|
fmt.Sprintf(`size expected to be basetypes.Int32Value, was: %T`, sizeAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
sortAttribute, ok := attributes["sort"]
|
sortAttribute, ok := attributes["sort"]
|
||||||
|
|
@ -1620,12 +1620,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int64Value)
|
totalPagesVal, ok := totalPagesAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_pages expected to be basetypes.Int64Value, was: %T`, totalPagesAttribute))
|
fmt.Sprintf(`total_pages expected to be basetypes.Int32Value, was: %T`, totalPagesAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsAttribute, ok := attributes["total_rows"]
|
totalRowsAttribute, ok := attributes["total_rows"]
|
||||||
|
|
@ -1638,12 +1638,12 @@ func NewPaginationValue(attributeTypes map[string]attr.Type, attributes map[stri
|
||||||
return NewPaginationValueUnknown(), diags
|
return NewPaginationValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int64Value)
|
totalRowsVal, ok := totalRowsAttribute.(basetypes.Int32Value)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`total_rows expected to be basetypes.Int64Value, was: %T`, totalRowsAttribute))
|
fmt.Sprintf(`total_rows expected to be basetypes.Int32Value, was: %T`, totalRowsAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -1728,11 +1728,11 @@ func (t PaginationType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = PaginationValue{}
|
var _ basetypes.ObjectValuable = PaginationValue{}
|
||||||
|
|
||||||
type PaginationValue struct {
|
type PaginationValue struct {
|
||||||
Page basetypes.Int64Value `tfsdk:"page"`
|
Page basetypes.Int32Value `tfsdk:"page"`
|
||||||
Size basetypes.Int64Value `tfsdk:"size"`
|
Size basetypes.Int32Value `tfsdk:"size"`
|
||||||
Sort basetypes.StringValue `tfsdk:"sort"`
|
Sort basetypes.StringValue `tfsdk:"sort"`
|
||||||
TotalPages basetypes.Int64Value `tfsdk:"total_pages"`
|
TotalPages basetypes.Int32Value `tfsdk:"total_pages"`
|
||||||
TotalRows basetypes.Int64Value `tfsdk:"total_rows"`
|
TotalRows basetypes.Int32Value `tfsdk:"total_rows"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1742,11 +1742,11 @@ func (v PaginationValue) ToTerraformValue(ctx context.Context) (tftypes.Value, e
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
attrTypes["page"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["page"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["size"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["size"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
|
attrTypes["sort"] = basetypes.StringType{}.TerraformType(ctx)
|
||||||
attrTypes["total_pages"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["total_pages"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
attrTypes["total_rows"] = basetypes.Int64Type{}.TerraformType(ctx)
|
attrTypes["total_rows"] = basetypes.Int32Type{}.TerraformType(ctx)
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||||
|
|
||||||
|
|
@ -1824,11 +1824,11 @@ func (v PaginationValue) ToObjectValue(ctx context.Context) (basetypes.ObjectVal
|
||||||
var diags diag.Diagnostics
|
var diags diag.Diagnostics
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
"page": basetypes.Int64Type{},
|
"page": basetypes.Int32Type{},
|
||||||
"size": basetypes.Int64Type{},
|
"size": basetypes.Int32Type{},
|
||||||
"sort": basetypes.StringType{},
|
"sort": basetypes.StringType{},
|
||||||
"total_pages": basetypes.Int64Type{},
|
"total_pages": basetypes.Int32Type{},
|
||||||
"total_rows": basetypes.Int64Type{},
|
"total_rows": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
|
|
||||||
if v.IsNull() {
|
if v.IsNull() {
|
||||||
|
|
@ -1900,10 +1900,10 @@ func (v PaginationValue) Type(ctx context.Context) attr.Type {
|
||||||
|
|
||||||
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v PaginationValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"page": basetypes.Int64Type{},
|
"page": basetypes.Int32Type{},
|
||||||
"size": basetypes.Int64Type{},
|
"size": basetypes.Int32Type{},
|
||||||
"sort": basetypes.StringType{},
|
"sort": basetypes.StringType{},
|
||||||
"total_pages": basetypes.Int64Type{},
|
"total_pages": basetypes.Int32Type{},
|
||||||
"total_rows": basetypes.Int64Type{},
|
"total_rows": basetypes.Int32Type{},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,24 +1,24 @@
|
||||||
package postgresFlexAlphaFlavor
|
package postgresflexalphaflavor
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
)
|
)
|
||||||
|
|
||||||
type flavorsClientReader interface {
|
type flavorsClientReader interface {
|
||||||
GetFlavorsRequest(
|
GetFlavorsRequest(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
projectId, region string,
|
projectId, region string,
|
||||||
) postgresflex.ApiGetFlavorsRequestRequest
|
) v3alpha1api.ApiGetFlavorsRequestRequest
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
|
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
|
||||||
[]postgresflex.ListFlavors,
|
[]v3alpha1api.ListFlavors,
|
||||||
error,
|
error,
|
||||||
) {
|
) {
|
||||||
getAllFilter := func(_ postgresflex.ListFlavors) bool { return true }
|
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
|
||||||
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
|
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|
@ -32,29 +32,29 @@ func getFlavorsByFilter(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
client flavorsClientReader,
|
client flavorsClientReader,
|
||||||
projectId, region string,
|
projectId, region string,
|
||||||
filter func(db postgresflex.ListFlavors) bool,
|
filter func(db v3alpha1api.ListFlavors) bool,
|
||||||
) ([]postgresflex.ListFlavors, error) {
|
) ([]v3alpha1api.ListFlavors, error) {
|
||||||
if projectId == "" || region == "" {
|
if projectId == "" || region == "" {
|
||||||
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
|
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
|
||||||
}
|
}
|
||||||
|
|
||||||
const pageSize = 25
|
const pageSize = 25
|
||||||
|
|
||||||
var result = make([]postgresflex.ListFlavors, 0)
|
var result = make([]v3alpha1api.ListFlavors, 0)
|
||||||
|
|
||||||
for page := int32(1); ; page++ {
|
for page := int32(1); ; page++ {
|
||||||
res, err := client.GetFlavorsRequest(ctx, projectId, region).
|
res, err := client.GetFlavorsRequest(ctx, projectId, region).
|
||||||
Page(page).Size(pageSize).Sort(postgresflex.FLAVORSORT_ID_ASC).Execute()
|
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
|
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the API returns no flavors, we have reached the end of the list.
|
// If the API returns no flavors, we have reached the end of the list.
|
||||||
if res.Flavors == nil || len(*res.Flavors) == 0 {
|
if len(res.Flavors) == 0 {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, flavor := range *res.Flavors {
|
for _, flavor := range res.Flavors {
|
||||||
if filter(flavor) {
|
if filter(flavor) {
|
||||||
result = append(result, flavor)
|
result = append(result, flavor)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,11 @@
|
||||||
package postgresFlexAlphaFlavor
|
package postgresflexalphaflavor
|
||||||
|
|
||||||
|
/*
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type mockRequest struct {
|
type mockRequest struct {
|
||||||
|
|
@ -30,25 +29,25 @@ func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) po
|
||||||
return m.executeRequest()
|
return m.executeRequest()
|
||||||
}
|
}
|
||||||
|
|
||||||
var mockResp = func(page int64) (*postgresflex.GetFlavorsResponse, error) {
|
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
|
||||||
if page == 1 {
|
if page == 1 {
|
||||||
return &postgresflex.GetFlavorsResponse{
|
return &postgresflex.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflex.ListFlavors{
|
Flavors: []postgresflex.ListFlavors{
|
||||||
{Id: utils.Ptr("flavor-1"), Description: utils.Ptr("first")},
|
{Id: "flavor-1", Description: "first"},
|
||||||
{Id: utils.Ptr("flavor-2"), Description: utils.Ptr("second")},
|
{Id: "flavor-2", Description: "second"},
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
if page == 2 {
|
if page == 2 {
|
||||||
return &postgresflex.GetFlavorsResponse{
|
return &postgresflex.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflex.ListFlavors{
|
Flavors: []postgresflex.ListFlavors{
|
||||||
{Id: utils.Ptr("flavor-3"), Description: utils.Ptr("three")},
|
{Id: "flavor-3", Description: "three"},
|
||||||
},
|
},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflex.GetFlavorsResponse{
|
return &postgresflex.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflex.ListFlavors{},
|
Flavors: []postgresflex.ListFlavors{},
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -72,7 +71,7 @@ func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
{
|
{
|
||||||
description: "Success - Filter flavors by description",
|
description: "Success - Filter flavors by description",
|
||||||
projectId: "pid", region: "reg",
|
projectId: "pid", region: "reg",
|
||||||
filter: func(f postgresflex.ListFlavors) bool { return *f.Description == "first" },
|
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
|
||||||
wantCount: 1,
|
wantCount: 1,
|
||||||
wantErr: false,
|
wantErr: false,
|
||||||
},
|
},
|
||||||
|
|
@ -86,10 +85,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(
|
||||||
tt.description, func(t *testing.T) {
|
tt.description, func(t *testing.T) {
|
||||||
var currentPage int64
|
var currentPage int32
|
||||||
client := &mockFlavorsClient{
|
client := &mockFlavorsClient{
|
||||||
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
return &mockRequest{
|
return mockRequest{
|
||||||
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
currentPage++
|
currentPage++
|
||||||
return mockResp(currentPage)
|
return mockResp(currentPage)
|
||||||
|
|
@ -113,10 +112,10 @@ func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetAllFlavors(t *testing.T) {
|
func TestGetAllFlavors(t *testing.T) {
|
||||||
var currentPage int64
|
var currentPage int32
|
||||||
client := &mockFlavorsClient{
|
client := &mockFlavorsClient{
|
||||||
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
return &mockRequest{
|
return mockRequest{
|
||||||
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
currentPage++
|
currentPage++
|
||||||
return mockResp(currentPage)
|
return mockResp(currentPage)
|
||||||
|
|
@ -133,3 +132,4 @@ func TestGetAllFlavors(t *testing.T) {
|
||||||
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
|
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
|
|
|
||||||
|
|
@ -5,8 +5,8 @@ import (
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
"github.com/hashicorp/terraform-plugin-framework/datasource"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
||||||
|
|
@ -26,7 +26,7 @@ func NewFlavorsDataSource() datasource.DataSource {
|
||||||
type dataSourceModel = postgresflexalphaGen.FlavorsModel
|
type dataSourceModel = postgresflexalphaGen.FlavorsModel
|
||||||
|
|
||||||
type flavorsDataSource struct {
|
type flavorsDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,8 +6,8 @@ import (
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
|
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
|
|
@ -37,7 +37,7 @@ type dataSourceModel struct {
|
||||||
|
|
||||||
// instanceDataSource is the data source implementation.
|
// instanceDataSource is the data source implementation.
|
||||||
type instanceDataSource struct {
|
type instanceDataSource struct {
|
||||||
client *postgresflexalpha.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -96,7 +96,7 @@ func (r *instanceDataSource) Read(
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectId)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.LogError(
|
utils.LogError(
|
||||||
ctx,
|
ctx,
|
||||||
|
|
|
||||||
|
|
@ -28,8 +28,8 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"backup_schedule": schema.StringAttribute{
|
"backup_schedule": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
|
||||||
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
|
||||||
},
|
},
|
||||||
"connection_info": schema.SingleNestedAttribute{
|
"connection_info": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
|
|
|
||||||
|
|
@ -7,8 +7,8 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/attr"
|
"github.com/hashicorp/terraform-plugin-framework/attr"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
||||||
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
|
postgresflexalphadatasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/datasources_gen"
|
||||||
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
@ -33,9 +33,7 @@ func mapGetInstanceResponseToModel(
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
|
isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
|
||||||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
|
|
||||||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
|
|
||||||
|
|
||||||
if isConnectionInfoIncomplete {
|
if isConnectionInfoIncomplete {
|
||||||
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
|
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
|
||||||
|
|
@ -43,11 +41,13 @@ func mapGetInstanceResponseToModel(
|
||||||
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
|
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
|
||||||
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"write": postgresflexalpharesource.NewWriteValueMust(
|
// careful - we can not use NewWriteValueMust here
|
||||||
|
"write": basetypes.NewObjectValueMust(
|
||||||
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
|
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
|
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
|
||||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
|
// note: IDE does not show that port is actually an int64 in the Schema
|
||||||
|
"port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
|
@ -62,7 +62,7 @@ func mapGetInstanceResponseToModel(
|
||||||
m.InstanceId.ValueString(),
|
m.InstanceId.ValueString(),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
m.InstanceId = types.StringPointerValue(resp.Id)
|
m.InstanceId = types.StringValue(resp.Id)
|
||||||
|
|
||||||
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
||||||
|
|
||||||
|
|
@ -75,12 +75,12 @@ func mapGetInstanceResponseToModel(
|
||||||
|
|
||||||
netInstAdd := types.StringValue("")
|
netInstAdd := types.StringValue("")
|
||||||
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
|
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
|
||||||
netInstAdd = types.StringValue(instAdd)
|
netInstAdd = types.StringValue(*instAdd)
|
||||||
}
|
}
|
||||||
|
|
||||||
netRtrAdd := types.StringValue("")
|
netRtrAdd := types.StringValue("")
|
||||||
if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok {
|
if rtrAdd, ok := resp.Network.GetRouterAddressOk(); ok {
|
||||||
netRtrAdd = types.StringValue(rtrAdd)
|
netRtrAdd = types.StringValue(*rtrAdd)
|
||||||
}
|
}
|
||||||
|
|
||||||
net, diags := postgresflexalpharesource.NewNetworkValue(
|
net, diags := postgresflexalpharesource.NewNetworkValue(
|
||||||
|
|
@ -98,7 +98,7 @@ func mapGetInstanceResponseToModel(
|
||||||
|
|
||||||
m.Network = net
|
m.Network = net
|
||||||
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
|
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
|
||||||
m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
|
m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
|
||||||
|
|
||||||
m.Name = types.StringValue(resp.GetName())
|
m.Name = types.StringValue(resp.GetName())
|
||||||
|
|
||||||
|
|
@ -108,7 +108,7 @@ func mapGetInstanceResponseToModel(
|
||||||
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
|
postgresflexalpharesource.StorageValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
|
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
|
||||||
"size": types.Int64Value(resp.Storage.GetSize()),
|
"size": types.Int64Value(int64(resp.Storage.GetSize())),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -131,7 +131,7 @@ func mapGetDataInstanceResponseToModel(
|
||||||
|
|
||||||
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
||||||
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
|
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
|
||||||
m.InstanceId = types.StringPointerValue(resp.Id)
|
m.InstanceId = types.StringValue(resp.Id)
|
||||||
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
||||||
m.Name = types.StringValue(resp.GetName())
|
m.Name = types.StringValue(resp.GetName())
|
||||||
|
|
||||||
|
|
@ -141,13 +141,13 @@ func mapGetDataInstanceResponseToModel(
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
|
m.Replicas = types.Int64Value(int64(resp.GetReplicas()))
|
||||||
m.RetentionDays = types.Int64Value(resp.GetRetentionDays())
|
m.RetentionDays = types.Int64Value(int64(resp.GetRetentionDays()))
|
||||||
m.Status = types.StringValue(string(resp.GetStatus()))
|
m.Status = types.StringValue(string(resp.GetStatus()))
|
||||||
storage, diags := postgresflexalphadatasource.NewStorageValue(
|
storage, diags := postgresflexalphadatasource.NewStorageValue(
|
||||||
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
|
postgresflexalphadatasource.StorageValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
|
"performance_class": types.StringValue(resp.Storage.GetPerformanceClass()),
|
||||||
"size": types.Int64Value(resp.Storage.GetSize()),
|
"size": types.Int64Value(int64(resp.Storage.GetSize())),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -159,9 +159,7 @@ func mapGetDataInstanceResponseToModel(
|
||||||
}
|
}
|
||||||
|
|
||||||
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
||||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
|
isConnectionInfoIncomplete := resp.ConnectionInfo.Write.Host == "" || resp.ConnectionInfo.Write.Port == 0
|
||||||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
|
|
||||||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
|
|
||||||
|
|
||||||
if isConnectionInfoIncomplete {
|
if isConnectionInfoIncomplete {
|
||||||
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
|
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
|
||||||
|
|
@ -169,11 +167,11 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
|
||||||
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
|
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
|
||||||
postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"write": postgresflexalphadatasource.NewWriteValueMust(
|
"write": types.ObjectValueMust(
|
||||||
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
|
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
|
"host": types.StringValue(resp.ConnectionInfo.Write.Host),
|
||||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
|
"port": types.Int64Value(int64(resp.ConnectionInfo.Write.Port)),
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
|
@ -182,26 +180,26 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
|
||||||
}
|
}
|
||||||
|
|
||||||
func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error {
|
func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) error {
|
||||||
netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
|
netACL, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
return fmt.Errorf("failed converting network acl from response")
|
return fmt.Errorf("failed converting network acl from response")
|
||||||
}
|
}
|
||||||
|
|
||||||
instAddr := ""
|
instAddr := ""
|
||||||
if iA, ok := resp.Network.GetInstanceAddressOk(); ok {
|
if iA, ok := resp.Network.GetInstanceAddressOk(); ok {
|
||||||
instAddr = iA
|
instAddr = *iA
|
||||||
}
|
}
|
||||||
|
|
||||||
rtrAddr := ""
|
rtrAddr := ""
|
||||||
if rA, ok := resp.Network.GetRouterAddressOk(); ok {
|
if rA, ok := resp.Network.GetRouterAddressOk(); ok {
|
||||||
rtrAddr = rA
|
rtrAddr = *rA
|
||||||
}
|
}
|
||||||
|
|
||||||
net, diags := postgresflexalphadatasource.NewNetworkValue(
|
net, diags := postgresflexalphadatasource.NewNetworkValue(
|
||||||
postgresflexalphadatasource.NetworkValue{}.AttributeTypes(ctx),
|
postgresflexalphadatasource.NetworkValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
|
"access_scope": types.StringValue(string(resp.Network.GetAccessScope())),
|
||||||
"acl": netAcl,
|
"acl": netACL,
|
||||||
"instance_address": types.StringValue(instAddr),
|
"instance_address": types.StringValue(instAddr),
|
||||||
"router_address": types.StringValue(rtrAddr),
|
"router_address": types.StringValue(rtrAddr),
|
||||||
},
|
},
|
||||||
|
|
@ -216,22 +214,22 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
|
||||||
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
||||||
keyId := ""
|
keyId := ""
|
||||||
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
|
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
|
||||||
keyId = keyIdVal
|
keyId = *keyIdVal
|
||||||
}
|
}
|
||||||
|
|
||||||
keyRingId := ""
|
keyRingId := ""
|
||||||
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
|
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
|
||||||
keyRingId = keyRingIdVal
|
keyRingId = *keyRingIdVal
|
||||||
}
|
}
|
||||||
|
|
||||||
keyVersion := ""
|
keyVersion := ""
|
||||||
if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
|
if keyVersionVal, ok := resp.Encryption.GetKekKeyVersionOk(); ok {
|
||||||
keyVersion = keyVersionVal
|
keyVersion = *keyVersionVal
|
||||||
}
|
}
|
||||||
|
|
||||||
svcAcc := ""
|
svcAcc := ""
|
||||||
if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok {
|
if svcAccVal, ok := resp.Encryption.GetServiceAccountOk(); ok {
|
||||||
svcAcc = svcAccVal
|
svcAcc = *svcAccVal
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Encryption = postgresflexalphadatasource.EncryptionValue{
|
m.Encryption = postgresflexalphadatasource.EncryptionValue{
|
||||||
|
|
|
||||||
|
|
@ -1,746 +1,191 @@
|
||||||
package postgresflexalpha
|
package postgresflexalpha
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
|
||||||
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
|
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
||||||
|
utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
//nolint:unused // TODO: remove when used
|
func Test_handleConnectionInfo(t *testing.T) {
|
||||||
type testFlavor struct {
|
type args struct {
|
||||||
Cpu int64
|
ctx context.Context
|
||||||
Description string
|
m *dataSourceModel
|
||||||
Id string
|
hostName string
|
||||||
MaxGB int64
|
port int32
|
||||||
Memory int64
|
|
||||||
MinGB int64
|
|
||||||
NodeType string
|
|
||||||
StorageClasses []testFlavorStorageClass
|
|
||||||
}
|
|
||||||
|
|
||||||
//nolint:unused // TODO: remove when used
|
|
||||||
type testFlavorStorageClass struct {
|
|
||||||
Class string
|
|
||||||
MaxIoPerSec int64
|
|
||||||
MaxThroughInMb int64
|
|
||||||
}
|
|
||||||
|
|
||||||
//nolint:unused // TODO: remove when used
|
|
||||||
var responseList = []testFlavor{
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.1",
|
|
||||||
Id: "flv1.1",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 1,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.2",
|
|
||||||
Id: "flv1.2",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 2,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.3",
|
|
||||||
Id: "flv1.3",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 3,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.4",
|
|
||||||
Id: "flv1.4",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 4,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.5",
|
|
||||||
Id: "flv1.5",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 5,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.6",
|
|
||||||
Id: "flv1.6",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 6,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.7",
|
|
||||||
Id: "flv1.7",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 7,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.8",
|
|
||||||
Id: "flv1.8",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 8,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.9",
|
|
||||||
Id: "flv1.9",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 9,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
/* ......................................................... */
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.1",
|
|
||||||
Id: "flv2.1",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 1,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.2",
|
|
||||||
Id: "flv2.2",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 2,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.3",
|
|
||||||
Id: "flv2.3",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 3,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.4",
|
|
||||||
Id: "flv2.4",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 4,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.5",
|
|
||||||
Id: "flv2.5",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 5,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.6",
|
|
||||||
Id: "flv2.6",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 6,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "single",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
/* ......................................................... */
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.1 replica",
|
|
||||||
Id: "flv1.1r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 1,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.2 replica",
|
|
||||||
Id: "flv1.2r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 2,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.3 replica",
|
|
||||||
Id: "flv1.3r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 3,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.4 replica",
|
|
||||||
Id: "flv1.4r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 4,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.5 replica",
|
|
||||||
Id: "flv1.5r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 5,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 1,
|
|
||||||
Description: "flavor 1.6 replica",
|
|
||||||
Id: "flv1.6r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 6,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
/* ......................................................... */
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.1 replica",
|
|
||||||
Id: "flv2.1r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 1,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.2 replica",
|
|
||||||
Id: "flv2.2r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 2,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.3 replica",
|
|
||||||
Id: "flv2.3r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 3,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.4 replica",
|
|
||||||
Id: "flv2.4r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 4,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.5 replica",
|
|
||||||
Id: "flv2.5r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 5,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Cpu: 2,
|
|
||||||
Description: "flavor 2.6 replica",
|
|
||||||
Id: "flv2.6r",
|
|
||||||
MaxGB: 500,
|
|
||||||
Memory: 6,
|
|
||||||
MinGB: 5,
|
|
||||||
NodeType: "Replica",
|
|
||||||
StorageClasses: []testFlavorStorageClass{
|
|
||||||
{Class: "sc1", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc2", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
{Class: "sc3", MaxIoPerSec: 0, MaxThroughInMb: 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
/* ......................................................... */
|
|
||||||
}
|
|
||||||
|
|
||||||
//nolint:unused // TODO: remove when used
|
|
||||||
func testFlavorListToResponseFlavorList(f []testFlavor) []postgresflex.ListFlavors {
|
|
||||||
result := make([]postgresflex.ListFlavors, len(f))
|
|
||||||
for i, flavor := range f {
|
|
||||||
result[i] = testFlavorToResponseFlavor(flavor)
|
|
||||||
}
|
}
|
||||||
return result
|
tests := []struct {
|
||||||
}
|
name string
|
||||||
|
args args
|
||||||
//nolint:unused // TODO: remove when used
|
}{
|
||||||
func testFlavorToResponseFlavor(f testFlavor) postgresflex.ListFlavors {
|
{
|
||||||
var scList []postgresflex.FlavorStorageClassesStorageClass
|
name: "empty connection info",
|
||||||
for _, fl := range f.StorageClasses {
|
args: args{
|
||||||
scList = append(
|
ctx: context.TODO(),
|
||||||
scList, postgresflex.FlavorStorageClassesStorageClass{
|
m: &dataSourceModel{},
|
||||||
Class: utils.Ptr(fl.Class),
|
hostName: "",
|
||||||
MaxIoPerSec: utils.Ptr(fl.MaxIoPerSec),
|
port: 0,
|
||||||
MaxThroughInMb: utils.Ptr(fl.MaxThroughInMb),
|
|
||||||
},
|
},
|
||||||
)
|
},
|
||||||
|
{
|
||||||
|
name: "empty connection info host",
|
||||||
|
args: args{
|
||||||
|
ctx: context.TODO(),
|
||||||
|
m: &dataSourceModel{},
|
||||||
|
hostName: "",
|
||||||
|
port: 1234,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty connection info port",
|
||||||
|
args: args{
|
||||||
|
ctx: context.TODO(),
|
||||||
|
m: &dataSourceModel{},
|
||||||
|
hostName: "hostname",
|
||||||
|
port: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid connection info",
|
||||||
|
args: args{
|
||||||
|
ctx: context.TODO(),
|
||||||
|
m: &dataSourceModel{},
|
||||||
|
hostName: "host",
|
||||||
|
port: 1000,
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
return postgresflex.ListFlavors{
|
for _, tt := range tests {
|
||||||
Cpu: utils.Ptr(f.Cpu),
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
Description: utils.Ptr(f.Description),
|
resp := &postgresflex.GetInstanceResponse{
|
||||||
Id: utils.Ptr(f.Id),
|
ConnectionInfo: postgresflex.InstanceConnectionInfo{
|
||||||
MaxGB: utils.Ptr(f.MaxGB),
|
Write: postgresflex.InstanceConnectionInfoWrite{
|
||||||
Memory: utils.Ptr(f.Memory),
|
Host: tt.args.hostName,
|
||||||
MinGB: utils.Ptr(f.MinGB),
|
Port: int32(tt.args.port),
|
||||||
NodeType: utils.Ptr(f.NodeType),
|
},
|
||||||
StorageClasses: &scList,
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
handleConnectionInfo(tt.args.ctx, tt.args.m, resp)
|
||||||
|
|
||||||
|
if tt.args.hostName == "" || tt.args.port == 0 {
|
||||||
|
if !tt.args.m.ConnectionInfo.IsNull() {
|
||||||
|
t.Errorf("expected connection info to be null")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if tt.args.hostName != "" && tt.args.port != 0 {
|
||||||
|
res := tt.args.m.ConnectionInfo.Write.Attributes()
|
||||||
|
gotHost := ""
|
||||||
|
if r, ok := res["host"]; ok {
|
||||||
|
gotHost = utils2.RemoveQuotes(r.String())
|
||||||
|
}
|
||||||
|
if gotHost != tt.args.hostName {
|
||||||
|
t.Errorf("host value incorrect: want: %s - got: %s", tt.args.hostName, gotHost)
|
||||||
|
}
|
||||||
|
|
||||||
|
gotPort, ok := res["port"]
|
||||||
|
if !ok {
|
||||||
|
t.Errorf("could not find a value for port in connection_info.write")
|
||||||
|
}
|
||||||
|
if !gotPort.Equal(types.Int64Value(int64(tt.args.port))) {
|
||||||
|
t.Errorf("port value incorrect: want: %d - got: %s", tt.args.port, gotPort.String())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// func Test_getAllFlavors(t *testing.T) {
|
func Test_handleEncryption(t *testing.T) {
|
||||||
// type args struct {
|
t.Skipf("please implement")
|
||||||
// projectId string
|
type args struct {
|
||||||
// region string
|
m *dataSourceModel
|
||||||
// }
|
resp *postgresflex.GetInstanceResponse
|
||||||
// tests := []struct {
|
}
|
||||||
// name string
|
tests := []struct {
|
||||||
// args args
|
name string
|
||||||
// firstItem int
|
args args
|
||||||
// lastItem int
|
}{
|
||||||
// want []postgresflex.ListFlavors
|
// TODO: Add test cases.
|
||||||
// wantErr bool
|
}
|
||||||
// }{
|
for _, tt := range tests {
|
||||||
// {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
// name: "find exactly one flavor",
|
handleEncryption(tt.args.m, tt.args.resp)
|
||||||
// args: args{
|
t.Logf("need to implement more")
|
||||||
// projectId: "project",
|
})
|
||||||
// region: "region",
|
}
|
||||||
// },
|
}
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: 0,
|
|
||||||
// want: []postgresflex.ListFlavors{
|
|
||||||
// testFlavorToResponseFlavor(responseList[0]),
|
|
||||||
// },
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "get exactly 1 page flavors",
|
|
||||||
// args: args{
|
|
||||||
// projectId: "project",
|
|
||||||
// region: "region",
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: 9,
|
|
||||||
// want: testFlavorListToResponseFlavorList(responseList[0:10]),
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "get exactly 20 flavors",
|
|
||||||
// args: args{
|
|
||||||
// projectId: "project",
|
|
||||||
// region: "region",
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: 20,
|
|
||||||
// // 0 indexed therefore we want :21
|
|
||||||
// want: testFlavorListToResponseFlavorList(responseList[0:21]),
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "get all flavors",
|
|
||||||
// args: args{
|
|
||||||
// projectId: "project",
|
|
||||||
// region: "region",
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: len(responseList),
|
|
||||||
// want: testFlavorListToResponseFlavorList(responseList),
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// }
|
|
||||||
// for _, tt := range tests {
|
|
||||||
// t.Run(tt.name, func(t *testing.T) {
|
|
||||||
// first := tt.firstItem
|
|
||||||
// if first > len(responseList)-1 {
|
|
||||||
// first = len(responseList) - 1
|
|
||||||
// }
|
|
||||||
// last := tt.lastItem
|
|
||||||
// if last > len(responseList)-1 {
|
|
||||||
// last = len(responseList) - 1
|
|
||||||
// }
|
|
||||||
// mockClient := postgresFlexClientMocked{
|
|
||||||
// returnError: tt.wantErr,
|
|
||||||
// firstItem: first,
|
|
||||||
// lastItem: last,
|
|
||||||
// }
|
|
||||||
// got, err := getAllFlavors(context.TODO(), mockClient, tt.args.projectId, tt.args.region)
|
|
||||||
// if (err != nil) != tt.wantErr {
|
|
||||||
// t.Errorf("getAllFlavors() error = %v, wantErr %v", err, tt.wantErr)
|
|
||||||
// return
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// if diff := cmp.Diff(tt.want, got); diff != "" {
|
|
||||||
// t.Errorf("mismatch (-want +got):\n%s", diff)
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// if !reflect.DeepEqual(got, tt.want) {
|
|
||||||
// t.Errorf("getAllFlavors() got = %v, want %v", got, tt.want)
|
|
||||||
// }
|
|
||||||
// })
|
|
||||||
// }
|
|
||||||
//}
|
|
||||||
|
|
||||||
// func Test_loadFlavorId(t *testing.T) {
|
func Test_handleNetwork(t *testing.T) {
|
||||||
// type args struct {
|
t.Skipf("please implement")
|
||||||
// ctx context.Context
|
type args struct {
|
||||||
// model *Model
|
ctx context.Context
|
||||||
// storage *storageModel
|
m *dataSourceModel
|
||||||
// }
|
resp *postgresflex.GetInstanceResponse
|
||||||
// tests := []struct {
|
}
|
||||||
// name string
|
tests := []struct {
|
||||||
// args args
|
name string
|
||||||
// firstItem int
|
args args
|
||||||
// lastItem int
|
wantErr bool
|
||||||
// want []postgresflex.ListFlavors
|
}{
|
||||||
// wantErr bool
|
// TODO: Add test cases.
|
||||||
// }{
|
}
|
||||||
// {
|
for _, tt := range tests {
|
||||||
// name: "find a single flavor",
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
// args: args{
|
if err := handleNetwork(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr {
|
||||||
// ctx: context.Background(),
|
t.Errorf("handleNetwork() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
// model: &Model{
|
}
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
})
|
||||||
// Region: basetypes.NewStringValue("region"),
|
}
|
||||||
// },
|
}
|
||||||
// storage: &storageModel{
|
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
func Test_mapGetDataInstanceResponseToModel(t *testing.T) {
|
||||||
// Size: basetypes.NewInt64Value(100),
|
t.Skipf("please implement")
|
||||||
// },
|
type args struct {
|
||||||
// },
|
ctx context.Context
|
||||||
// firstItem: 0,
|
m *dataSourceModel
|
||||||
// lastItem: 3,
|
resp *postgresflex.GetInstanceResponse
|
||||||
// want: []postgresflex.ListFlavors{
|
}
|
||||||
// testFlavorToResponseFlavor(responseList[0]),
|
tests := []struct {
|
||||||
// },
|
name string
|
||||||
// wantErr: false,
|
args args
|
||||||
// },
|
wantErr bool
|
||||||
// {
|
}{
|
||||||
// name: "find a single flavor by replicas option",
|
// TODO: Add test cases.
|
||||||
// args: args{
|
}
|
||||||
// ctx: context.Background(),
|
for _, tt := range tests {
|
||||||
// model: &Model{
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
if err := mapGetDataInstanceResponseToModel(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr {
|
||||||
// Region: basetypes.NewStringValue("region"),
|
t.Errorf("mapGetDataInstanceResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
// Replicas: basetypes.NewInt64Value(1),
|
}
|
||||||
// },
|
})
|
||||||
// storage: &storageModel{
|
}
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
}
|
||||||
// Size: basetypes.NewInt64Value(100),
|
|
||||||
// },
|
func Test_mapGetInstanceResponseToModel(t *testing.T) {
|
||||||
// },
|
t.Skipf("please implement")
|
||||||
// firstItem: 0,
|
type args struct {
|
||||||
// lastItem: 3,
|
ctx context.Context
|
||||||
// want: []postgresflex.ListFlavors{
|
m *postgresflexalpharesource.InstanceModel
|
||||||
// testFlavorToResponseFlavor(responseList[0]),
|
resp *postgresflex.GetInstanceResponse
|
||||||
// },
|
}
|
||||||
// wantErr: false,
|
tests := []struct {
|
||||||
// },
|
name string
|
||||||
// {
|
args args
|
||||||
// name: "fail finding find a single flavor by replicas option",
|
wantErr bool
|
||||||
// args: args{
|
}{
|
||||||
// ctx: context.Background(),
|
// TODO: Add test cases.
|
||||||
// model: &Model{
|
}
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
for _, tt := range tests {
|
||||||
// Region: basetypes.NewStringValue("region"),
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
// Replicas: basetypes.NewInt64Value(1),
|
if err := mapGetInstanceResponseToModel(tt.args.ctx, tt.args.m, tt.args.resp); (err != nil) != tt.wantErr {
|
||||||
// },
|
t.Errorf("mapGetInstanceResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
// storage: &storageModel{
|
}
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
})
|
||||||
// Size: basetypes.NewInt64Value(100),
|
}
|
||||||
// },
|
}
|
||||||
// },
|
|
||||||
// firstItem: 13,
|
|
||||||
// lastItem: 23,
|
|
||||||
// want: []postgresflex.ListFlavors{},
|
|
||||||
// wantErr: true,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "find a replicas flavor lower case",
|
|
||||||
// args: args{
|
|
||||||
// ctx: context.Background(),
|
|
||||||
// model: &Model{
|
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
|
||||||
// Region: basetypes.NewStringValue("region"),
|
|
||||||
// },
|
|
||||||
// storage: &storageModel{
|
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
|
||||||
// Size: basetypes.NewInt64Value(100),
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: len(responseList) - 1,
|
|
||||||
// want: []postgresflex.ListFlavors{
|
|
||||||
// testFlavorToResponseFlavor(responseList[16]),
|
|
||||||
// },
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "find a replicas flavor CamelCase",
|
|
||||||
// args: args{
|
|
||||||
// ctx: context.Background(),
|
|
||||||
// model: &Model{
|
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
|
||||||
// Region: basetypes.NewStringValue("region"),
|
|
||||||
// },
|
|
||||||
// storage: &storageModel{
|
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
|
||||||
// Size: basetypes.NewInt64Value(100),
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: len(responseList) - 1,
|
|
||||||
// want: []postgresflex.ListFlavors{
|
|
||||||
// testFlavorToResponseFlavor(responseList[16]),
|
|
||||||
// },
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "find a replicas flavor by replicas option",
|
|
||||||
// args: args{
|
|
||||||
// ctx: context.Background(),
|
|
||||||
// model: &Model{
|
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
|
||||||
// Region: basetypes.NewStringValue("region"),
|
|
||||||
// Replicas: basetypes.NewInt64Value(3),
|
|
||||||
// },
|
|
||||||
// flavor: &flavorModel{
|
|
||||||
// CPU: basetypes.NewInt64Value(1),
|
|
||||||
// RAM: basetypes.NewInt64Value(1),
|
|
||||||
// },
|
|
||||||
// storage: &storageModel{
|
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
|
||||||
// Size: basetypes.NewInt64Value(100),
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: len(responseList) - 1,
|
|
||||||
// want: []postgresflex.ListFlavors{
|
|
||||||
// testFlavorToResponseFlavor(responseList[16]),
|
|
||||||
// },
|
|
||||||
// wantErr: false,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "fail finding a replica flavor",
|
|
||||||
// args: args{
|
|
||||||
// ctx: context.Background(),
|
|
||||||
// model: &Model{
|
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
|
||||||
// Region: basetypes.NewStringValue("region"),
|
|
||||||
// Replicas: basetypes.NewInt64Value(3),
|
|
||||||
// },
|
|
||||||
// flavor: &flavorModel{
|
|
||||||
// CPU: basetypes.NewInt64Value(1),
|
|
||||||
// RAM: basetypes.NewInt64Value(1),
|
|
||||||
// },
|
|
||||||
// storage: &storageModel{
|
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
|
||||||
// Size: basetypes.NewInt64Value(100),
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: 10,
|
|
||||||
// want: []postgresflex.ListFlavors{},
|
|
||||||
// wantErr: true,
|
|
||||||
// },
|
|
||||||
// {
|
|
||||||
// name: "no flavor found error",
|
|
||||||
// args: args{
|
|
||||||
// ctx: context.Background(),
|
|
||||||
// model: &Model{
|
|
||||||
// ProjectId: basetypes.NewStringValue("project"),
|
|
||||||
// Region: basetypes.NewStringValue("region"),
|
|
||||||
// },
|
|
||||||
// flavor: &flavorModel{
|
|
||||||
// CPU: basetypes.NewInt64Value(10),
|
|
||||||
// RAM: basetypes.NewInt64Value(1000),
|
|
||||||
// NodeType: basetypes.NewStringValue("Single"),
|
|
||||||
// },
|
|
||||||
// storage: &storageModel{
|
|
||||||
// Class: basetypes.NewStringValue("sc1"),
|
|
||||||
// Size: basetypes.NewInt64Value(100),
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// firstItem: 0,
|
|
||||||
// lastItem: 3,
|
|
||||||
// want: []postgresflex.ListFlavors{},
|
|
||||||
// wantErr: true,
|
|
||||||
// },
|
|
||||||
// }
|
|
||||||
// for _, tt := range tests {
|
|
||||||
// t.Run(tt.name, func(t *testing.T) {
|
|
||||||
// first := tt.firstItem
|
|
||||||
// if first > len(responseList)-1 {
|
|
||||||
// first = len(responseList) - 1
|
|
||||||
// }
|
|
||||||
// last := tt.lastItem
|
|
||||||
// if last > len(responseList)-1 {
|
|
||||||
// last = len(responseList) - 1
|
|
||||||
// }
|
|
||||||
// mockClient := postgresFlexClientMocked{
|
|
||||||
// returnError: tt.wantErr,
|
|
||||||
// firstItem: first,
|
|
||||||
// lastItem: last,
|
|
||||||
// }
|
|
||||||
// if err := loadFlavorId(tt.args.ctx, mockClient, tt.args.model, tt.args.flavor, tt.args.storage); (err != nil) != tt.wantErr {
|
|
||||||
// t.Errorf("loadFlavorId() error = %v, wantErr %v", err, tt.wantErr)
|
|
||||||
// }
|
|
||||||
// })
|
|
||||||
// }
|
|
||||||
//}
|
|
||||||
|
|
|
||||||
|
|
@ -14,8 +14,9 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
||||||
|
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
||||||
|
|
@ -50,7 +51,7 @@ type InstanceResourceIdentityModel struct {
|
||||||
|
|
||||||
// instanceResource is the resource implementation.
|
// instanceResource is the resource implementation.
|
||||||
type instanceResource struct {
|
type instanceResource struct {
|
||||||
client *postgresflex.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -195,9 +196,9 @@ func (r *instanceResource) Create(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId := model.ProjectId.ValueString()
|
projectID := model.ProjectId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
var netAcl []string
|
var netAcl []string
|
||||||
|
|
@ -207,17 +208,13 @@ func (r *instanceResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if model.Replicas.ValueInt64() > math.MaxInt32 {
|
replVal := model.Replicas.ValueInt64() // nolint:gosec // check is performed above
|
||||||
resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
replVal := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
|
|
||||||
payload := modelToCreateInstancePayload(netAcl, model, replVal)
|
payload := modelToCreateInstancePayload(netAcl, model, replVal)
|
||||||
|
|
||||||
// Create new instance
|
// Create new instance
|
||||||
createResp, err := r.client.CreateInstanceRequest(
|
createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectID,
|
||||||
region,
|
region,
|
||||||
).CreateInstanceRequestPayload(payload).Execute()
|
).CreateInstanceRequestPayload(payload).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -226,7 +223,7 @@ func (r *instanceResource) Create(
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
instanceId, ok := createResp.GetIdOk()
|
instanceID, ok := createResp.GetIdOk()
|
||||||
if !ok {
|
if !ok {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", "could not find instance id in response")
|
core.LogAndAddError(ctx, &resp.Diagnostics, "error creating instance", "could not find instance id in response")
|
||||||
return
|
return
|
||||||
|
|
@ -234,16 +231,16 @@ func (r *instanceResource) Create(
|
||||||
|
|
||||||
// Set data returned by API in identity
|
// Set data returned by API in identity
|
||||||
identity := InstanceResourceIdentityModel{
|
identity := InstanceResourceIdentityModel{
|
||||||
ProjectID: types.StringValue(projectId),
|
ProjectID: types.StringValue(projectID),
|
||||||
Region: types.StringValue(region),
|
Region: types.StringValue(region),
|
||||||
InstanceID: types.StringValue(instanceId),
|
InstanceID: types.StringPointerValue(instanceID),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client, projectId, region, instanceId).
|
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectID, region, *instanceID).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
|
|
@ -276,37 +273,35 @@ func (r *instanceResource) Create(
|
||||||
}
|
}
|
||||||
|
|
||||||
func modelToCreateInstancePayload(
|
func modelToCreateInstancePayload(
|
||||||
netAcl []string,
|
netACL []string,
|
||||||
model postgresflexalpha.InstanceModel,
|
model postgresflexalpha.InstanceModel,
|
||||||
replVal int32,
|
replVal int64,
|
||||||
) postgresflex.CreateInstanceRequestPayload {
|
) v3alpha1api.CreateInstanceRequestPayload {
|
||||||
var enc *postgresflex.InstanceEncryption
|
var enc *v3alpha1api.InstanceEncryption
|
||||||
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
|
if !model.Encryption.IsNull() && !model.Encryption.IsUnknown() {
|
||||||
enc = &postgresflex.InstanceEncryption{
|
enc = &v3alpha1api.InstanceEncryption{
|
||||||
KekKeyId: model.Encryption.KekKeyId.ValueStringPointer(),
|
KekKeyId: model.Encryption.KekKeyId.ValueString(),
|
||||||
KekKeyRingId: model.Encryption.KekKeyRingId.ValueStringPointer(),
|
KekKeyRingId: model.Encryption.KekKeyRingId.ValueString(),
|
||||||
KekKeyVersion: model.Encryption.KekKeyVersion.ValueStringPointer(),
|
KekKeyVersion: model.Encryption.KekKeyVersion.ValueString(),
|
||||||
ServiceAccount: model.Encryption.ServiceAccount.ValueStringPointer(),
|
ServiceAccount: model.Encryption.ServiceAccount.ValueString(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
payload := postgresflex.CreateInstanceRequestPayload{
|
payload := v3alpha1api.CreateInstanceRequestPayload{
|
||||||
BackupSchedule: model.BackupSchedule.ValueStringPointer(),
|
BackupSchedule: model.BackupSchedule.ValueString(),
|
||||||
Encryption: enc,
|
Encryption: enc,
|
||||||
FlavorId: model.FlavorId.ValueStringPointer(),
|
FlavorId: model.FlavorId.ValueString(),
|
||||||
Name: model.Name.ValueStringPointer(),
|
Name: model.Name.ValueString(),
|
||||||
Network: &postgresflex.InstanceNetworkCreate{
|
Network: v3alpha1api.InstanceNetworkCreate{
|
||||||
AccessScope: postgresflex.InstanceNetworkGetAccessScopeAttributeType(
|
AccessScope: (*v3alpha1api.InstanceNetworkAccessScope)(model.Network.AccessScope.ValueStringPointer()),
|
||||||
model.Network.AccessScope.ValueStringPointer(),
|
Acl: netACL,
|
||||||
),
|
|
||||||
Acl: &netAcl,
|
|
||||||
},
|
},
|
||||||
Replicas: postgresflex.CreateInstanceRequestPayloadGetReplicasAttributeType(&replVal),
|
Replicas: v3alpha1api.Replicas(replVal), //nolint:gosec // TODO
|
||||||
RetentionDays: model.RetentionDays.ValueInt64Pointer(),
|
RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // TODO
|
||||||
Storage: &postgresflex.StorageCreate{
|
Storage: v3alpha1api.StorageCreate{
|
||||||
PerformanceClass: model.Storage.PerformanceClass.ValueStringPointer(),
|
PerformanceClass: model.Storage.PerformanceClass.ValueString(),
|
||||||
Size: model.Storage.Size.ValueInt64Pointer(),
|
Size: int32(model.Storage.Size.ValueInt64()), //nolint:gosec // TODO
|
||||||
},
|
},
|
||||||
Version: model.Version.ValueStringPointer(),
|
Version: model.Version.ValueString(),
|
||||||
}
|
}
|
||||||
return payload
|
return payload
|
||||||
}
|
}
|
||||||
|
|
@ -347,7 +342,7 @@ func (r *instanceResource) Read(
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
instanceResp, err := r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
||||||
if ok && oapiErr.StatusCode == http.StatusNotFound {
|
if ok && oapiErr.StatusCode == http.StatusNotFound {
|
||||||
|
|
@ -366,7 +361,7 @@ func (r *instanceResource) Read(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
|
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
|
||||||
if respInstanceID != instanceId {
|
if *respInstanceID != instanceId {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -431,47 +426,56 @@ func (r *instanceResource) Update(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
projectId := identityData.ProjectID.ValueString()
|
projectID := identityData.ProjectID.ValueString()
|
||||||
instanceId := identityData.InstanceID.ValueString()
|
instanceID := identityData.InstanceID.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
var netAcl []string
|
var netACL []string
|
||||||
diag := model.Network.Acl.ElementsAs(ctx, &netAcl, false)
|
diag := model.Network.Acl.ElementsAs(ctx, &netACL, false)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
if diag.HasError() {
|
if diag.HasError() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if model.Replicas.ValueInt64() > math.MaxInt32 {
|
if model.Replicas.ValueInt64() > math.MaxInt32 {
|
||||||
resp.Diagnostics.AddError("invalid int32 value", "provided int64 value does not fit into int32")
|
core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "replicas value too large for int32")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
replInt32 := int32(model.Replicas.ValueInt64()) // nolint:gosec // check is performed above
|
|
||||||
|
|
||||||
payload := postgresflex.UpdateInstanceRequestPayload{
|
if model.RetentionDays.ValueInt64() > math.MaxInt32 {
|
||||||
BackupSchedule: model.BackupSchedule.ValueStringPointer(),
|
core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "retention_days value too large for int32")
|
||||||
FlavorId: model.FlavorId.ValueStringPointer(),
|
return
|
||||||
Name: model.Name.ValueStringPointer(),
|
}
|
||||||
Network: &postgresflex.InstanceNetworkUpdate{
|
|
||||||
Acl: &netAcl,
|
if model.Storage.Size.ValueInt64() > math.MaxInt32 {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "UPDATE", "storage.size value too large for int32")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
payload := v3alpha1api.UpdateInstanceRequestPayload{
|
||||||
|
BackupSchedule: model.BackupSchedule.ValueString(),
|
||||||
|
FlavorId: model.FlavorId.ValueString(),
|
||||||
|
Name: model.Name.ValueString(),
|
||||||
|
Network: v3alpha1api.InstanceNetworkUpdate{
|
||||||
|
Acl: netACL,
|
||||||
},
|
},
|
||||||
Replicas: postgresflex.UpdateInstanceRequestPayloadGetReplicasAttributeType(&replInt32),
|
Replicas: v3alpha1api.Replicas(model.Replicas.ValueInt64()), //nolint:gosec // checked above
|
||||||
RetentionDays: model.RetentionDays.ValueInt64Pointer(),
|
RetentionDays: int32(model.RetentionDays.ValueInt64()), //nolint:gosec // checked above
|
||||||
Storage: &postgresflex.StorageUpdate{
|
Storage: v3alpha1api.StorageUpdate{
|
||||||
Size: model.Storage.Size.ValueInt64Pointer(),
|
Size: coreUtils.Ptr(int32(model.Storage.Size.ValueInt64())), //nolint:gosec // checked above
|
||||||
},
|
},
|
||||||
Version: model.Version.ValueStringPointer(),
|
Version: model.Version.ValueString(),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update existing instance
|
// Update existing instance
|
||||||
err := r.client.UpdateInstanceRequest(
|
err := r.client.DefaultAPI.UpdateInstanceRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectID,
|
||||||
region,
|
region,
|
||||||
instanceId,
|
instanceID,
|
||||||
).UpdateInstanceRequestPayload(payload).Execute()
|
).UpdateInstanceRequestPayload(payload).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error())
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", err.Error())
|
||||||
|
|
@ -482,10 +486,10 @@ func (r *instanceResource) Update(
|
||||||
|
|
||||||
waitResp, err := wait.PartialUpdateInstanceWaitHandler(
|
waitResp, err := wait.PartialUpdateInstanceWaitHandler(
|
||||||
ctx,
|
ctx,
|
||||||
r.client,
|
r.client.DefaultAPI,
|
||||||
projectId,
|
projectID,
|
||||||
region,
|
region,
|
||||||
instanceId,
|
instanceID,
|
||||||
).WaitWithContext(ctx)
|
).WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
|
|
@ -540,7 +544,7 @@ func (r *instanceResource) Delete(
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
// Delete existing instance
|
// Delete existing instance
|
||||||
err := r.client.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
|
||||||
return
|
return
|
||||||
|
|
@ -548,7 +552,7 @@ func (r *instanceResource) Delete(
|
||||||
|
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
_, err = r.client.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
_, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
||||||
if ok && oapiErr.StatusCode != http.StatusNotFound {
|
if ok && oapiErr.StatusCode != http.StatusNotFound {
|
||||||
|
|
|
||||||
|
|
@ -30,8 +30,8 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"backup_schedule": schema.StringAttribute{
|
"backup_schedule": schema.StringAttribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
Description: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
|
||||||
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
MarkdownDescription: "The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.",
|
||||||
},
|
},
|
||||||
"connection_info": schema.SingleNestedAttribute{
|
"connection_info": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
|
|
|
||||||
|
|
@ -5,17 +5,23 @@ import (
|
||||||
_ "embed"
|
_ "embed"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
"log"
|
||||||
|
"math"
|
||||||
"os"
|
"os"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
|
"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
|
||||||
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
||||||
|
"github.com/hashicorp/terraform-plugin-testing/terraform"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflexalpha2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
|
postgresflexalphaInstance "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance"
|
||||||
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
|
||||||
// The fwresource import alias is so there is no collision
|
// The fwresource import alias is so there is no collision
|
||||||
|
|
@ -26,61 +32,15 @@ import (
|
||||||
|
|
||||||
const pfx = "stackitprivatepreview_postgresflexalpha"
|
const pfx = "stackitprivatepreview_postgresflexalpha"
|
||||||
|
|
||||||
var testInstances []string
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper")
|
|
||||||
resource.AddTestSweepers(
|
|
||||||
sweeperName, &resource.Sweeper{
|
|
||||||
Name: sweeperName,
|
|
||||||
F: func(_ string) error { // region is passed by the testing framework
|
|
||||||
ctx := context.Background()
|
|
||||||
apiClientConfigOptions := []config.ConfigurationOption{}
|
|
||||||
apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalln(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region).
|
|
||||||
Size(100).
|
|
||||||
Execute()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalln(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, inst := range instances.GetInstances() {
|
|
||||||
if strings.HasPrefix(inst.GetName(), "tf-acc-") {
|
|
||||||
for _, item := range testInstances {
|
|
||||||
if inst.GetName() == item {
|
|
||||||
delErr := apiClient.DeleteInstanceRequestExecute(
|
|
||||||
ctx,
|
|
||||||
testutils.ProjectId,
|
|
||||||
testutils.Region,
|
|
||||||
inst.GetId(),
|
|
||||||
)
|
|
||||||
if delErr != nil {
|
|
||||||
// TODO: maybe just warn?
|
|
||||||
log.Fatalln(delErr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInstanceResourceSchema(t *testing.T) {
|
func TestInstanceResourceSchema(t *testing.T) {
|
||||||
t.Parallel()
|
// t.Parallel()
|
||||||
|
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
schemaRequest := fwresource.SchemaRequest{}
|
schemaRequest := fwresource.SchemaRequest{}
|
||||||
schemaResponse := &fwresource.SchemaResponse{}
|
schemaResponse := &fwresource.SchemaResponse{}
|
||||||
|
|
||||||
// Instantiate the resource.Resource and call its Schema method
|
// Instantiate the resource.Resource and call its Schema method
|
||||||
postgresflexalpha.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
|
postgresflexalphaInstance.NewInstanceResource().Schema(ctx, schemaRequest, schemaResponse)
|
||||||
|
|
||||||
if schemaResponse.Diagnostics.HasError() {
|
if schemaResponse.Diagnostics.HasError() {
|
||||||
t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
|
t.Fatalf("Schema method diagnostics: %+v", schemaResponse.Diagnostics)
|
||||||
|
|
@ -94,14 +54,6 @@ func TestInstanceResourceSchema(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
|
||||||
//go:embed testdata/resource-no-enc.tf
|
|
||||||
resourceConfigNoEnc string //nolint:unused // needs implementation
|
|
||||||
|
|
||||||
//go:embed testdata/resource-enc.tf
|
|
||||||
resourceConfigEnc string //nolint:unused // needs implementation
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestMain(m *testing.M) {
|
func TestMain(m *testing.M) {
|
||||||
testutils.Setup()
|
testutils.Setup()
|
||||||
code := m.Run()
|
code := m.Run()
|
||||||
|
|
@ -115,44 +67,23 @@ func testAccPreCheck(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// func TestAccResourceExample_parallel(t *testing.T) {
|
|
||||||
// t.Parallel()
|
|
||||||
//
|
|
||||||
// exData := resData{
|
|
||||||
// Region: "eu01",
|
|
||||||
// ServiceAccountFilePath: sa_file,
|
|
||||||
// ProjectID: project_id,
|
|
||||||
// Name: acctest.RandomWithPrefix("tf-acc"),
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// resource.Test(t, resource.TestCase{
|
|
||||||
// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
|
||||||
// Steps: []resource.TestStep{
|
|
||||||
// {
|
|
||||||
// Config: testAccResourceEncryptionExampleConfig(exData),
|
|
||||||
// Check: resource.TestCheckResourceAttrSet("example_resource.test", "id"),
|
|
||||||
// },
|
|
||||||
// },
|
|
||||||
// })
|
|
||||||
//}
|
|
||||||
|
|
||||||
type resData struct {
|
type resData struct {
|
||||||
ServiceAccountFilePath string
|
ServiceAccountFilePath string
|
||||||
ProjectId string
|
ProjectID string
|
||||||
Region string
|
Region string
|
||||||
Name string
|
Name string
|
||||||
TfName string
|
TfName string
|
||||||
FlavorId string
|
FlavorID string
|
||||||
BackupSchedule string
|
BackupSchedule string
|
||||||
UseEncryption bool
|
UseEncryption bool
|
||||||
KekKeyId string
|
KekKeyID string
|
||||||
KekKeyRingId string
|
KekKeyRingID string
|
||||||
KekKeyVersion uint8
|
KekKeyVersion uint8
|
||||||
KekServiceAccount string
|
KekServiceAccount string
|
||||||
PerformanceClass string
|
PerformanceClass string
|
||||||
Replicas uint32
|
Replicas uint32
|
||||||
Size uint32
|
Size uint32
|
||||||
AclString string
|
ACLString string
|
||||||
AccessScope string
|
AccessScope string
|
||||||
RetentionDays uint32
|
RetentionDays uint32
|
||||||
Version string
|
Version string
|
||||||
|
|
@ -162,13 +93,13 @@ type resData struct {
|
||||||
|
|
||||||
type User struct {
|
type User struct {
|
||||||
Name string
|
Name string
|
||||||
ProjectId string
|
ProjectID string
|
||||||
Roles []string
|
Roles []string
|
||||||
}
|
}
|
||||||
|
|
||||||
type Database struct {
|
type Database struct {
|
||||||
Name string
|
Name string
|
||||||
ProjectId string
|
ProjectID string
|
||||||
Owner string
|
Owner string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -177,17 +108,17 @@ func getExample() resData {
|
||||||
return resData{
|
return resData{
|
||||||
Region: os.Getenv("TF_ACC_REGION"),
|
Region: os.Getenv("TF_ACC_REGION"),
|
||||||
ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
|
ServiceAccountFilePath: os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
|
||||||
ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
|
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
|
||||||
Name: name,
|
Name: name,
|
||||||
TfName: name,
|
TfName: name,
|
||||||
FlavorId: "2.4",
|
FlavorID: "2.4",
|
||||||
BackupSchedule: "0 0 * * *",
|
BackupSchedule: "0 0 * * *",
|
||||||
UseEncryption: false,
|
UseEncryption: false,
|
||||||
RetentionDays: 33,
|
RetentionDays: 33,
|
||||||
Replicas: 1,
|
Replicas: 1,
|
||||||
PerformanceClass: "premium-perf2-stackit",
|
PerformanceClass: "premium-perf2-stackit",
|
||||||
Size: 10,
|
Size: 10,
|
||||||
AclString: "0.0.0.0/0",
|
ACLString: "0.0.0.0/0",
|
||||||
AccessScope: "PUBLIC",
|
AccessScope: "PUBLIC",
|
||||||
Version: "17",
|
Version: "17",
|
||||||
}
|
}
|
||||||
|
|
@ -202,28 +133,103 @@ func TestAccInstance(t *testing.T) {
|
||||||
updSizeData := exData
|
updSizeData := exData
|
||||||
updSizeData.Size = 25
|
updSizeData.Size = 25
|
||||||
|
|
||||||
|
updBackupSched := updSizeData
|
||||||
|
// api should complain about more than one daily backup
|
||||||
|
updBackupSched.BackupSchedule = "30 3 * * *"
|
||||||
|
|
||||||
|
/*
|
||||||
|
{
|
||||||
|
"backupSchedule": "6 6 * * *",
|
||||||
|
"flavorId": "1.2",
|
||||||
|
"name": "postgres-instance",
|
||||||
|
"network": {
|
||||||
|
"acl": [
|
||||||
|
"198.51.100.0/24"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"replicas": 1,
|
||||||
|
"retentionDays": 35,
|
||||||
|
"storage": {
|
||||||
|
"size": 10
|
||||||
|
},
|
||||||
|
"version": "string"
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
testItemID := testutils.ResStr(pfx, "instance", exData.TfName)
|
||||||
|
|
||||||
resource.ParallelTest(
|
resource.ParallelTest(
|
||||||
t, resource.TestCase{
|
t, resource.TestCase{
|
||||||
PreCheck: func() {
|
PreCheck: func() {
|
||||||
testAccPreCheck(t)
|
testAccPreCheck(t)
|
||||||
t.Logf(" ... working on instance %s", exData.TfName)
|
t.Logf(" ... working on instance %s", exData.TfName)
|
||||||
testInstances = append(testInstances, exData.TfName)
|
|
||||||
},
|
},
|
||||||
|
CheckDestroy: testAccCheckPostgresFlexDestroy,
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
// Create and verify
|
// Create and verify
|
||||||
{
|
{
|
||||||
|
//PreConfig: func() {
|
||||||
|
// //
|
||||||
|
// },
|
||||||
Config: testutils.StringFromTemplateMust(
|
Config: testutils.StringFromTemplateMust(
|
||||||
"testdata/instance_template.gompl",
|
"testdata/instance_template.gompl",
|
||||||
exData,
|
exData,
|
||||||
),
|
),
|
||||||
Check: resource.ComposeAggregateTestCheckFunc(
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
resource.TestCheckResourceAttr(
|
// check params acl count
|
||||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
resource.TestCheckResourceAttr(testItemID, "acl.#", "1"),
|
||||||
"name",
|
|
||||||
exData.Name,
|
// check params are set
|
||||||
),
|
resource.TestCheckResourceAttrSet(testItemID, "backup_schedule"),
|
||||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"),
|
|
||||||
|
//// connection_info should contain 1 sub entry
|
||||||
|
// resource.TestCheckResourceAttr(testItemID, "connection_info.%", "1"),
|
||||||
|
//
|
||||||
|
//// connection_info.write should contain 2 sub entries
|
||||||
|
// resource.TestCheckResourceAttr(testItemID, "connection_info.write", "2"),
|
||||||
|
//
|
||||||
|
// resource.TestCheckResourceAttrSet(testItemID, "connection_info.write.host"),
|
||||||
|
// resource.TestCheckResourceAttrSet(testItemID, "connection_info.write.port"),
|
||||||
|
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "flavor_id"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "id"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "instance_id"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "is_deletable"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "name"),
|
||||||
|
|
||||||
|
// network should contain 4 sub entries
|
||||||
|
resource.TestCheckResourceAttr(testItemID, "network.%", "4"),
|
||||||
|
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "network.access_scope"),
|
||||||
|
|
||||||
|
// on unencrypted instances we expect this to be empty
|
||||||
|
resource.TestCheckResourceAttr(testItemID, "network.instance_address", ""),
|
||||||
|
resource.TestCheckResourceAttr(testItemID, "network.router_address", ""),
|
||||||
|
|
||||||
|
// only one acl entry should be set
|
||||||
|
resource.TestCheckResourceAttr(testItemID, "network.acl.#", "1"),
|
||||||
|
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "replicas"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "retention_days"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "status"),
|
||||||
|
|
||||||
|
// storage should contain 2 sub entries
|
||||||
|
resource.TestCheckResourceAttr(testItemID, "storage.%", "2"),
|
||||||
|
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "storage.performance_class"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "storage.size"),
|
||||||
|
resource.TestCheckResourceAttrSet(testItemID, "version"),
|
||||||
|
|
||||||
|
// check absent attr
|
||||||
|
resource.TestCheckNoResourceAttr(testItemID, "encryption"),
|
||||||
|
resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_id"),
|
||||||
|
resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_ring_id"),
|
||||||
|
resource.TestCheckNoResourceAttr(testItemID, "encryption.kek_key_version"),
|
||||||
|
resource.TestCheckNoResourceAttr(testItemID, "encryption.service_account"),
|
||||||
|
|
||||||
|
// check param values
|
||||||
|
resource.TestCheckResourceAttr(testItemID, "name", exData.Name),
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
// Update name and verify
|
// Update name and verify
|
||||||
|
|
@ -254,6 +260,20 @@ func TestAccInstance(t *testing.T) {
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
// Update backup schedule
|
||||||
|
{
|
||||||
|
Config: testutils.StringFromTemplateMust(
|
||||||
|
"testdata/instance_template.gompl",
|
||||||
|
updBackupSched,
|
||||||
|
),
|
||||||
|
Check: resource.ComposeTestCheckFunc(
|
||||||
|
resource.TestCheckResourceAttr(
|
||||||
|
testutils.ResStr(pfx, "instance", exData.TfName),
|
||||||
|
"backup_schedule",
|
||||||
|
updBackupSched.BackupSchedule,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
},
|
||||||
//// Import test
|
//// Import test
|
||||||
//{
|
//{
|
||||||
// ResourceName: "example_resource.test",
|
// ResourceName: "example_resource.test",
|
||||||
|
|
@ -272,7 +292,7 @@ func TestAccInstanceWithUsers(t *testing.T) {
|
||||||
data.Users = []User{
|
data.Users = []User{
|
||||||
{
|
{
|
||||||
Name: userName,
|
Name: userName,
|
||||||
ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
|
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
|
||||||
Roles: []string{"login"},
|
Roles: []string{"login"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -282,8 +302,8 @@ func TestAccInstanceWithUsers(t *testing.T) {
|
||||||
PreCheck: func() {
|
PreCheck: func() {
|
||||||
testAccPreCheck(t)
|
testAccPreCheck(t)
|
||||||
t.Logf(" ... working on instance %s", data.TfName)
|
t.Logf(" ... working on instance %s", data.TfName)
|
||||||
testInstances = append(testInstances, data.TfName)
|
|
||||||
},
|
},
|
||||||
|
CheckDestroy: testAccCheckPostgresFlexDestroy,
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
// Create and verify
|
// Create and verify
|
||||||
|
|
@ -316,7 +336,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
data.Users = []User{
|
data.Users = []User{
|
||||||
{
|
{
|
||||||
Name: userName,
|
Name: userName,
|
||||||
ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
|
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
|
||||||
Roles: []string{"login"},
|
Roles: []string{"login"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -324,7 +344,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
data.Databases = []Database{
|
data.Databases = []Database{
|
||||||
{
|
{
|
||||||
Name: dbName,
|
Name: dbName,
|
||||||
ProjectId: os.Getenv("TF_ACC_PROJECT_ID"),
|
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
|
||||||
Owner: userName,
|
Owner: userName,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -334,8 +354,95 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
PreCheck: func() {
|
PreCheck: func() {
|
||||||
testAccPreCheck(t)
|
testAccPreCheck(t)
|
||||||
t.Logf(" ... working on instance %s", data.TfName)
|
t.Logf(" ... working on instance %s", data.TfName)
|
||||||
testInstances = append(testInstances, data.TfName)
|
|
||||||
},
|
},
|
||||||
|
CheckDestroy: testAccCheckPostgresFlexDestroy,
|
||||||
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
|
Steps: []resource.TestStep{
|
||||||
|
// Create and verify
|
||||||
|
{
|
||||||
|
Config: testutils.StringFromTemplateMust(
|
||||||
|
"testdata/instance_template.gompl",
|
||||||
|
data,
|
||||||
|
),
|
||||||
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
|
resource.TestCheckResourceAttr(
|
||||||
|
testutils.ResStr(pfx, "instance", data.TfName),
|
||||||
|
"name",
|
||||||
|
data.Name,
|
||||||
|
),
|
||||||
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||||
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||||
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||||
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName),
|
||||||
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName),
|
||||||
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccEncryptedInstanceWithDatabases(t *testing.T) {
|
||||||
|
encKekKeyID, ok := os.LookupEnv("TF_ACC_KEK_KEY_ID")
|
||||||
|
if !ok || encKekKeyID == "" {
|
||||||
|
t.Skip("env var TF_ACC_KEK_KEY_ID needed for encryption test")
|
||||||
|
}
|
||||||
|
|
||||||
|
encKekKeyRingID, ok := os.LookupEnv("TF_ACC_KEK_KEY_RING_ID")
|
||||||
|
if !ok || encKekKeyRingID == "" {
|
||||||
|
t.Skip("env var TF_ACC_KEK_KEY_RING_ID needed for encryption test")
|
||||||
|
}
|
||||||
|
|
||||||
|
encKekKeyVersion, ok := os.LookupEnv("TF_ACC_KEK_KEY_VERSION")
|
||||||
|
if !ok || encKekKeyVersion == "" {
|
||||||
|
t.Skip("env var TF_ACC_KEK_KEY_VERSION needed for encryption test")
|
||||||
|
}
|
||||||
|
|
||||||
|
encSvcAcc, ok := os.LookupEnv("TF_ACC_KEK_SERVICE_ACCOUNT")
|
||||||
|
if !ok || encSvcAcc == "" {
|
||||||
|
t.Skip("env var TF_ACC_KEK_SERVICE_ACCOUNT needed for encryption test")
|
||||||
|
}
|
||||||
|
|
||||||
|
data := getExample()
|
||||||
|
data.UseEncryption = true
|
||||||
|
data.KekKeyID = encKekKeyID
|
||||||
|
data.KekKeyRingID = encKekKeyRingID
|
||||||
|
data.KekServiceAccount = encSvcAcc
|
||||||
|
encKekKeyVersionInt, err := strconv.Atoi(encKekKeyVersion)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("error converting string to int")
|
||||||
|
}
|
||||||
|
if encKekKeyVersionInt > math.MaxUint8 {
|
||||||
|
t.Errorf("value too large to convert to uint8")
|
||||||
|
}
|
||||||
|
data.KekKeyVersion = uint8(encKekKeyVersionInt) //nolint:gosec // handled above
|
||||||
|
|
||||||
|
dbName := "testdb"
|
||||||
|
userName := "testUser"
|
||||||
|
data.Users = []User{
|
||||||
|
{
|
||||||
|
Name: userName,
|
||||||
|
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
|
||||||
|
Roles: []string{"login"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
data.Databases = []Database{
|
||||||
|
{
|
||||||
|
Name: dbName,
|
||||||
|
ProjectID: os.Getenv("TF_ACC_PROJECT_ID"),
|
||||||
|
Owner: userName,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
resource.ParallelTest(
|
||||||
|
t, resource.TestCase{
|
||||||
|
PreCheck: func() {
|
||||||
|
testAccPreCheck(t)
|
||||||
|
t.Logf(" ... working on instance %s", data.TfName)
|
||||||
|
},
|
||||||
|
CheckDestroy: testAccCheckPostgresFlexDestroy,
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
// Create and verify
|
// Create and verify
|
||||||
|
|
@ -402,19 +509,6 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
// // Run unit tests against mock
|
// // Run unit tests against mock
|
||||||
//}
|
//}
|
||||||
|
|
||||||
// type postgresFlexClientMocked struct {
|
|
||||||
// returnError bool
|
|
||||||
// getFlavorsResp *postgresflex.GetFlavorsResponse
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// func (c *postgresFlexClientMocked) ListFlavorsExecute(_ context.Context, _, _ string) (*postgresflex.GetFlavorsResponse, error) {
|
|
||||||
// if c.returnError {
|
|
||||||
// return nil, fmt.Errorf("get flavors failed")
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// return c.getFlavorsResp, nil
|
|
||||||
// }
|
|
||||||
|
|
||||||
// func TestNewInstanceResource(t *testing.T) {
|
// func TestNewInstanceResource(t *testing.T) {
|
||||||
// exData := resData{
|
// exData := resData{
|
||||||
// Region: "eu01",
|
// Region: "eu01",
|
||||||
|
|
@ -1028,3 +1122,87 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
// }
|
// }
|
||||||
// return nil
|
// return nil
|
||||||
//}
|
//}
|
||||||
|
|
||||||
|
func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
|
||||||
|
testutils.Setup()
|
||||||
|
|
||||||
|
pID, ok := os.LookupEnv("TF_ACC_PROJECT_ID")
|
||||||
|
if !ok {
|
||||||
|
log.Fatalln("unable to read TF_ACC_PROJECT_ID")
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.Background()
|
||||||
|
var client *v3alpha1api.APIClient
|
||||||
|
var err error
|
||||||
|
|
||||||
|
var region, projectID string
|
||||||
|
region = testutils.Region
|
||||||
|
if region == "" {
|
||||||
|
region = "eu01"
|
||||||
|
}
|
||||||
|
|
||||||
|
projectID = pID
|
||||||
|
if projectID == "" {
|
||||||
|
return fmt.Errorf("projectID could not be determined in destroy function")
|
||||||
|
}
|
||||||
|
|
||||||
|
apiClientConfigOptions := []config.ConfigurationOption{
|
||||||
|
config.WithServiceAccountKeyPath(os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE")),
|
||||||
|
config.WithRegion(region),
|
||||||
|
}
|
||||||
|
if testutils.PostgresFlexCustomEndpoint != "" {
|
||||||
|
apiClientConfigOptions = append(
|
||||||
|
apiClientConfigOptions,
|
||||||
|
config.WithEndpoint(testutils.PostgresFlexCustomEndpoint),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
client, err = v3alpha1api.NewAPIClient(apiClientConfigOptions...)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalln(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
instancesToDestroy := []string{}
|
||||||
|
for _, rs := range s.RootModule().Resources {
|
||||||
|
if rs.Type != "stackitprivatepreview_postgresflexalpha_instance" &&
|
||||||
|
rs.Type != "stackitprivatepreview_postgresflexbeta_instance" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// instance terraform ID: = "[project_id],[region],[instance_id]"
|
||||||
|
instanceID := strings.Split(rs.Primary.ID, core.Separator)[2]
|
||||||
|
instancesToDestroy = append(instancesToDestroy, instanceID)
|
||||||
|
}
|
||||||
|
|
||||||
|
instancesResp, err := client.DefaultAPI.ListInstancesRequest(ctx, projectID, region).
|
||||||
|
Size(100).
|
||||||
|
Execute()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("getting instancesResp: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
items := instancesResp.GetInstances()
|
||||||
|
for i := range items {
|
||||||
|
if items[i].Id == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if utils.Contains(instancesToDestroy, items[i].Id) {
|
||||||
|
err := client.DefaultAPI.DeleteInstanceRequest(ctx, testutils.ProjectId, region, items[i].Id).Execute()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("deleting instance %s during CheckDestroy: %w", items[i].Id, err)
|
||||||
|
}
|
||||||
|
err = postgresflexalpha.DeleteInstanceWaitHandler(
|
||||||
|
ctx,
|
||||||
|
client.DefaultAPI,
|
||||||
|
testutils.ProjectId,
|
||||||
|
testutils.Region,
|
||||||
|
items[i].Id,
|
||||||
|
15*time.Minute,
|
||||||
|
10*time.Second,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("deleting instance %s during CheckDestroy: waiting for deletion %w", items[i].Id, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,11 +4,11 @@ provider "stackitprivatepreview" {
|
||||||
}
|
}
|
||||||
|
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
project_id = "{{ .ProjectId }}"
|
project_id = "{{ .ProjectID }}"
|
||||||
name = "{{ .Name }}"
|
name = "{{ .Name }}"
|
||||||
backup_schedule = "{{ .BackupSchedule }}"
|
backup_schedule = "{{ .BackupSchedule }}"
|
||||||
retention_days = {{ .RetentionDays }}
|
retention_days = {{ .RetentionDays }}
|
||||||
flavor_id = "{{ .FlavorId }}"
|
flavor_id = "{{ .FlavorID }}"
|
||||||
replicas = {{ .Replicas }}
|
replicas = {{ .Replicas }}
|
||||||
storage = {
|
storage = {
|
||||||
performance_class = "{{ .PerformanceClass }}"
|
performance_class = "{{ .PerformanceClass }}"
|
||||||
|
|
@ -16,14 +16,14 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
}
|
}
|
||||||
{{ if .UseEncryption }}
|
{{ if .UseEncryption }}
|
||||||
encryption = {
|
encryption = {
|
||||||
kek_key_id = {{ .KekKeyId }}
|
kek_key_id = "{{ .KekKeyID }}"
|
||||||
kek_key_ring_id = {{ .KekKeyRingId }}
|
kek_key_ring_id = "{{ .KekKeyRingID }}"
|
||||||
kek_key_version = {{ .KekKeyVersion }}
|
kek_key_version = {{ .KekKeyVersion }}
|
||||||
service_account = "{{ .KekServiceAccount }}"
|
service_account = "{{ .KekServiceAccount }}"
|
||||||
}
|
}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
network = {
|
network = {
|
||||||
acl = ["{{ .AclString }}"]
|
acl = ["{{ .ACLString }}"]
|
||||||
access_scope = "{{ .AccessScope }}"
|
access_scope = "{{ .AccessScope }}"
|
||||||
}
|
}
|
||||||
version = {{ .Version }}
|
version = {{ .Version }}
|
||||||
|
|
@ -33,7 +33,7 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
{{ $tfName := .TfName }}
|
{{ $tfName := .TfName }}
|
||||||
{{ range $user := .Users }}
|
{{ range $user := .Users }}
|
||||||
resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
project_id = "{{ $user.ProjectId }}"
|
project_id = "{{ $user.ProjectID }}"
|
||||||
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
name = "{{ $user.Name }}"
|
name = "{{ $user.Name }}"
|
||||||
roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
|
roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
|
||||||
|
|
@ -45,7 +45,7 @@ resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
{{ $tfName := .TfName }}
|
{{ $tfName := .TfName }}
|
||||||
{{ range $db := .Databases }}
|
{{ range $db := .Databases }}
|
||||||
resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
||||||
project_id = "{{ $db.ProjectId }}"
|
project_id = "{{ $db.ProjectID }}"
|
||||||
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
name = "{{ $db.Name }}"
|
name = "{{ $db.Name }}"
|
||||||
owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name
|
owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name
|
||||||
|
|
|
||||||
|
|
@ -1,27 +0,0 @@
|
||||||
variable "project_id" {}
|
|
||||||
variable "kek_key_id" {}
|
|
||||||
variable "kek_key_ring_id" {}
|
|
||||||
|
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
|
|
||||||
project_id = var.project_id
|
|
||||||
name = "example-instance"
|
|
||||||
backup_schedule = "0 0 * * *"
|
|
||||||
retention_days = 30
|
|
||||||
flavor_id = "2.4"
|
|
||||||
replicas = 1
|
|
||||||
storage = {
|
|
||||||
performance_class = "premium-perf2-stackit"
|
|
||||||
size = 10
|
|
||||||
}
|
|
||||||
encryption = {
|
|
||||||
kek_key_id = var.kek_key_id
|
|
||||||
kek_key_ring_id = var.kek_key_ring_id
|
|
||||||
kek_key_version = 1
|
|
||||||
service_account = "service@account.email"
|
|
||||||
}
|
|
||||||
network = {
|
|
||||||
acl = ["0.0.0.0/0"]
|
|
||||||
access_scope = "PUBLIC"
|
|
||||||
}
|
|
||||||
version = 17
|
|
||||||
}
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
variable "project_id" {}
|
|
||||||
|
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
|
|
||||||
project_id = var.project_id
|
|
||||||
name = "example-instance"
|
|
||||||
backup_schedule = "0 0 * * *"
|
|
||||||
retention_days = 30
|
|
||||||
flavor_id = "2.4"
|
|
||||||
replicas = 1
|
|
||||||
storage = {
|
|
||||||
performance_class = "premium-perf2-stackit"
|
|
||||||
size = 10
|
|
||||||
}
|
|
||||||
network = {
|
|
||||||
acl = ["0.0.0.0/0"]
|
|
||||||
access_scope = "PUBLIC"
|
|
||||||
}
|
|
||||||
version = 17
|
|
||||||
}
|
|
||||||
|
|
@ -8,8 +8,8 @@ import (
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/diag"
|
"github.com/hashicorp/terraform-plugin-framework/diag"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
|
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
|
|
@ -40,7 +40,7 @@ type dataSourceModel struct {
|
||||||
|
|
||||||
// userDataSource is the data source implementation.
|
// userDataSource is the data source implementation.
|
||||||
type userDataSource struct {
|
type userDataSource struct {
|
||||||
client *postgresflex.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -101,24 +101,24 @@ func (r *userDataSource) Read(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId := model.ProjectId.ValueString()
|
projectID := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceID := model.InstanceId.ValueString()
|
||||||
userId64 := model.UserId.ValueInt64()
|
userID64 := model.UserId.ValueInt64()
|
||||||
if userId64 > math.MaxInt32 {
|
if userID64 > math.MaxInt32 {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
userId := int32(userId64) // nolint:gosec // check is performed above
|
userID := int32(userID64) // nolint:gosec // check is performed above
|
||||||
|
|
||||||
region := r.providerData.GetRegionWithOverride(model.Region)
|
region := r.providerData.GetRegionWithOverride(model.Region)
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
ctx = tflog.SetField(ctx, "user_id", userId)
|
ctx = tflog.SetField(ctx, "user_id", userID)
|
||||||
|
|
||||||
recordSetResp, err := r.client.GetUserRequest(ctx, projectId, region, instanceId, userId).Execute()
|
recordSetResp, err := r.client.DefaultAPI.GetUserRequest(ctx, projectID, region, instanceID, userID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
handleReadError(ctx, &diags, err, projectId, instanceId, userId)
|
handleReadError(ctx, &diags, err, projectID, instanceID, userID)
|
||||||
resp.State.RemoveResource(ctx)
|
resp.State.RemoveResource(ctx)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
@ -151,8 +151,8 @@ func handleReadError(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
diags *diag.Diagnostics,
|
diags *diag.Diagnostics,
|
||||||
err error,
|
err error,
|
||||||
projectId, instanceId string,
|
projectID, instanceID string,
|
||||||
userId int32,
|
userID int32,
|
||||||
) {
|
) {
|
||||||
utils.LogError(
|
utils.LogError(
|
||||||
ctx,
|
ctx,
|
||||||
|
|
@ -161,23 +161,23 @@ func handleReadError(
|
||||||
"Reading user",
|
"Reading user",
|
||||||
fmt.Sprintf(
|
fmt.Sprintf(
|
||||||
"User with ID %q or instance with ID %q does not exist in project %q.",
|
"User with ID %q or instance with ID %q does not exist in project %q.",
|
||||||
userId,
|
userID,
|
||||||
instanceId,
|
instanceID,
|
||||||
projectId,
|
projectID,
|
||||||
),
|
),
|
||||||
map[int]string{
|
map[int]string{
|
||||||
http.StatusBadRequest: fmt.Sprintf(
|
http.StatusBadRequest: fmt.Sprintf(
|
||||||
"Invalid user request parameters for project %q and instance %q.",
|
"Invalid user request parameters for project %q and instance %q.",
|
||||||
projectId,
|
projectID,
|
||||||
instanceId,
|
instanceID,
|
||||||
),
|
),
|
||||||
http.StatusNotFound: fmt.Sprintf(
|
http.StatusNotFound: fmt.Sprintf(
|
||||||
"User, instance %q, or project %q or user %q not found.",
|
"User, instance %q, or project %q or user %q not found.",
|
||||||
instanceId,
|
instanceID,
|
||||||
projectId,
|
projectID,
|
||||||
userId,
|
userID,
|
||||||
),
|
),
|
||||||
http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectId),
|
http.StatusForbidden: fmt.Sprintf("Forbidden access to project %q.", projectID),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -6,14 +6,14 @@ import (
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/attr"
|
"github.com/hashicorp/terraform-plugin-framework/attr"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
// mapDataSourceFields maps API response to data source model, preserving existing ID.
|
// mapDataSourceFields maps API response to data source model, preserving existing ID.
|
||||||
func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSourceModel, region string) error {
|
func mapDataSourceFields(userResp *v3alpha1api.GetUserResponse, model *dataSourceModel, region string) error {
|
||||||
if userResp == nil {
|
if userResp == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
|
|
@ -22,27 +22,24 @@ func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSour
|
||||||
}
|
}
|
||||||
user := userResp
|
user := userResp
|
||||||
|
|
||||||
var userId int64
|
var userID int64
|
||||||
if model.UserId.ValueInt64() != 0 {
|
if model.UserId.ValueInt64() == 0 {
|
||||||
userId = model.UserId.ValueInt64()
|
|
||||||
} else if user.Id != nil {
|
|
||||||
userId = *user.Id
|
|
||||||
} else {
|
|
||||||
return fmt.Errorf("user id not present")
|
return fmt.Errorf("user id not present")
|
||||||
}
|
}
|
||||||
|
userID = model.UserId.ValueInt64()
|
||||||
|
|
||||||
model.TerraformID = utils.BuildInternalTerraformId(
|
model.TerraformID = utils.BuildInternalTerraformId(
|
||||||
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userId, 10),
|
model.ProjectId.ValueString(), region, model.InstanceId.ValueString(), strconv.FormatInt(userID, 10),
|
||||||
)
|
)
|
||||||
|
|
||||||
model.UserId = types.Int64Value(userId)
|
model.UserId = types.Int64Value(userID)
|
||||||
model.Name = types.StringValue(user.GetName())
|
model.Name = types.StringValue(user.GetName())
|
||||||
|
|
||||||
if user.Roles == nil {
|
if user.Roles == nil {
|
||||||
model.Roles = types.List(types.SetNull(types.StringType))
|
model.Roles = types.List(types.SetNull(types.StringType))
|
||||||
} else {
|
} else {
|
||||||
var roles []attr.Value
|
var roles []attr.Value
|
||||||
for _, role := range *user.Roles {
|
for _, role := range user.Roles {
|
||||||
roles = append(roles, types.StringValue(string(role)))
|
roles = append(roles, types.StringValue(string(role)))
|
||||||
}
|
}
|
||||||
rolesSet, diags := types.SetValue(types.StringType, roles)
|
rolesSet, diags := types.SetValue(types.StringType, roles)
|
||||||
|
|
@ -52,24 +49,24 @@ func mapDataSourceFields(userResp *postgresflex.GetUserResponse, model *dataSour
|
||||||
model.Roles = types.List(rolesSet)
|
model.Roles = types.List(rolesSet)
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(userId)
|
model.Id = types.Int64Value(userID)
|
||||||
model.Region = types.StringValue(region)
|
model.Region = types.StringValue(region)
|
||||||
model.Status = types.StringValue(user.GetStatus())
|
model.Status = types.StringValue(user.GetStatus())
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// toPayloadRoles converts a string slice to the API's role type.
|
// toPayloadRoles converts a string slice to the API's role type.
|
||||||
func toPayloadRoles(roles *[]string) *[]postgresflex.UserRole {
|
func toPayloadRoles(roles []string) []v3alpha1api.UserRole {
|
||||||
var userRoles = make([]postgresflex.UserRole, 0, len(*roles))
|
var userRoles = make([]v3alpha1api.UserRole, 0, len(roles))
|
||||||
for _, role := range *roles {
|
for _, role := range roles {
|
||||||
userRoles = append(userRoles, postgresflex.UserRole(role))
|
userRoles = append(userRoles, v3alpha1api.UserRole(role))
|
||||||
}
|
}
|
||||||
return &userRoles
|
return userRoles
|
||||||
}
|
}
|
||||||
|
|
||||||
// toUpdatePayload creates an API update payload from the resource model.
|
// toUpdatePayload creates an API update payload from the resource model.
|
||||||
func toUpdatePayload(model *resourceModel, roles *[]string) (
|
func toUpdatePayload(model *resourceModel, roles []string) (
|
||||||
*postgresflex.UpdateUserRequestPayload,
|
*v3alpha1api.UpdateUserRequestPayload,
|
||||||
error,
|
error,
|
||||||
) {
|
) {
|
||||||
if model == nil {
|
if model == nil {
|
||||||
|
|
@ -79,14 +76,14 @@ func toUpdatePayload(model *resourceModel, roles *[]string) (
|
||||||
return nil, fmt.Errorf("nil roles")
|
return nil, fmt.Errorf("nil roles")
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflex.UpdateUserRequestPayload{
|
return &v3alpha1api.UpdateUserRequestPayload{
|
||||||
Name: model.Name.ValueStringPointer(),
|
Name: model.Name.ValueStringPointer(),
|
||||||
Roles: toPayloadRoles(roles),
|
Roles: toPayloadRoles(roles),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// toCreatePayload creates an API create payload from the resource model.
|
// toCreatePayload creates an API create payload from the resource model.
|
||||||
func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.CreateUserRequestPayload, error) {
|
func toCreatePayload(model *resourceModel, roles []string) (*v3alpha1api.CreateUserRequestPayload, error) {
|
||||||
if model == nil {
|
if model == nil {
|
||||||
return nil, fmt.Errorf("nil model")
|
return nil, fmt.Errorf("nil model")
|
||||||
}
|
}
|
||||||
|
|
@ -94,14 +91,14 @@ func toCreatePayload(model *resourceModel, roles *[]string) (*postgresflex.Creat
|
||||||
return nil, fmt.Errorf("nil roles")
|
return nil, fmt.Errorf("nil roles")
|
||||||
}
|
}
|
||||||
|
|
||||||
return &postgresflex.CreateUserRequestPayload{
|
return &v3alpha1api.CreateUserRequestPayload{
|
||||||
Roles: toPayloadRoles(roles),
|
Roles: toPayloadRoles(roles),
|
||||||
Name: model.Name.ValueStringPointer(),
|
Name: model.Name.ValueString(),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// mapResourceFields maps API response to the resource model, preserving existing ID.
|
// mapResourceFields maps API response to the resource model, preserving existing ID.
|
||||||
func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceModel, region string) error {
|
func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceModel, region string) error {
|
||||||
if userResp == nil {
|
if userResp == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
|
|
@ -110,24 +107,24 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo
|
||||||
}
|
}
|
||||||
user := userResp
|
user := userResp
|
||||||
|
|
||||||
var userId int64
|
var userID int64
|
||||||
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
|
if !model.UserId.IsNull() && !model.UserId.IsUnknown() && model.UserId.ValueInt64() != 0 {
|
||||||
userId = model.UserId.ValueInt64()
|
userID = model.UserId.ValueInt64()
|
||||||
} else if user.Id != nil {
|
} else if user.Id != 0 {
|
||||||
userId = *user.Id
|
userID = int64(user.Id)
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("user id not present")
|
return fmt.Errorf("user id not present")
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(userId)
|
model.Id = types.Int64Value(userID)
|
||||||
model.UserId = types.Int64Value(userId)
|
model.UserId = types.Int64Value(userID)
|
||||||
model.Name = types.StringPointerValue(user.Name)
|
model.Name = types.StringValue(user.Name)
|
||||||
|
|
||||||
if user.Roles == nil {
|
if user.Roles == nil {
|
||||||
model.Roles = types.List(types.SetNull(types.StringType))
|
model.Roles = types.List(types.SetNull(types.StringType))
|
||||||
} else {
|
} else {
|
||||||
var roles []attr.Value
|
var roles []attr.Value
|
||||||
for _, role := range *user.Roles {
|
for _, role := range user.Roles {
|
||||||
roles = append(roles, types.StringValue(string(role)))
|
roles = append(roles, types.StringValue(string(role)))
|
||||||
}
|
}
|
||||||
rolesSet, diags := types.SetValue(types.StringType, roles)
|
rolesSet, diags := types.SetValue(types.StringType, roles)
|
||||||
|
|
@ -137,6 +134,6 @@ func mapResourceFields(userResp *postgresflex.GetUserResponse, model *resourceMo
|
||||||
model.Roles = types.List(rolesSet)
|
model.Roles = types.List(rolesSet)
|
||||||
}
|
}
|
||||||
model.Region = types.StringValue(region)
|
model.Region = types.StringValue(region)
|
||||||
model.Status = types.StringPointerValue(user.Status)
|
model.Status = types.StringValue(user.Status)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,8 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
|
data "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/datasources_gen"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -43,12 +44,12 @@ func TestMapDataSourceFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
"simple_values",
|
"simple_values",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Roles: &[]postgresflex.UserRole{
|
Roles: []postgresflex.UserRole{
|
||||||
"role_1",
|
"role_1",
|
||||||
"role_2",
|
"role_2",
|
||||||
"",
|
"",
|
||||||
},
|
},
|
||||||
Name: utils.Ptr("username"),
|
Name: "username",
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
dataSourceModel{
|
dataSourceModel{
|
||||||
|
|
@ -77,10 +78,10 @@ func TestMapDataSourceFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
"null_fields_and_int_conversions",
|
"null_fields_and_int_conversions",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Roles: &[]postgresflex.UserRole{},
|
Roles: []postgresflex.UserRole{},
|
||||||
Name: nil,
|
Name: "",
|
||||||
Status: utils.Ptr("status"),
|
Status: "status",
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
dataSourceModel{
|
dataSourceModel{
|
||||||
|
|
@ -160,7 +161,7 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
{
|
{
|
||||||
"default_values",
|
"default_values",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
|
|
@ -168,11 +169,11 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
Name: types.StringNull(),
|
Name: types.StringValue(""),
|
||||||
Roles: types.List(types.SetNull(types.StringType)),
|
Roles: types.List(types.SetNull(types.StringType)),
|
||||||
Password: types.StringNull(),
|
Password: types.StringNull(),
|
||||||
Region: types.StringValue(testRegion),
|
Region: types.StringValue(testRegion),
|
||||||
Status: types.StringNull(),
|
Status: types.StringValue(""),
|
||||||
//ConnectionString: types.StringNull(),
|
//ConnectionString: types.StringNull(),
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
@ -180,9 +181,9 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
{
|
{
|
||||||
"simple_values",
|
"simple_values",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Name: utils.Ptr("username"),
|
Name: "username",
|
||||||
Status: utils.Ptr("status"),
|
Status: "status",
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
|
|
@ -202,9 +203,9 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
{
|
{
|
||||||
"null_fields_and_int_conversions",
|
"null_fields_and_int_conversions",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Name: nil,
|
Name: "",
|
||||||
Status: nil,
|
Status: "",
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
|
|
@ -212,11 +213,11 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
Name: types.StringNull(),
|
Name: types.StringValue(""),
|
||||||
Roles: types.List(types.SetNull(types.StringType)),
|
Roles: types.List(types.SetNull(types.StringType)),
|
||||||
Password: types.StringNull(),
|
Password: types.StringNull(),
|
||||||
Region: types.StringValue(testRegion),
|
Region: types.StringValue(testRegion),
|
||||||
Status: types.StringNull(),
|
Status: types.StringValue(""),
|
||||||
//ConnectionString: types.StringNull(),
|
//ConnectionString: types.StringNull(),
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
@ -259,7 +260,7 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
t.Fatalf("Should not have failed: %v", err)
|
t.Fatalf("Should not have failed: %v", err)
|
||||||
}
|
}
|
||||||
if tt.isValid {
|
if tt.isValid {
|
||||||
diff := cmp.Diff(state, &tt.expected)
|
diff := cmp.Diff(&tt.expected, state)
|
||||||
if diff != "" {
|
if diff != "" {
|
||||||
t.Fatalf("Data does not match: %s", diff)
|
t.Fatalf("Data does not match: %s", diff)
|
||||||
}
|
}
|
||||||
|
|
@ -281,7 +282,7 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
"default_values",
|
"default_values",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
|
|
@ -289,10 +290,10 @@ func TestMapFields(t *testing.T) {
|
||||||
UserId: types.Int64Value(int64(1)),
|
UserId: types.Int64Value(int64(1)),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
Name: types.StringNull(),
|
Name: types.StringValue(""),
|
||||||
Roles: types.List(types.SetNull(types.StringType)),
|
Roles: types.List(types.SetNull(types.StringType)),
|
||||||
Region: types.StringValue(testRegion),
|
Region: types.StringValue(testRegion),
|
||||||
Status: types.StringNull(),
|
Status: types.StringValue(""),
|
||||||
//ConnectionString: types.StringNull(),
|
//ConnectionString: types.StringNull(),
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
@ -300,13 +301,13 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
"simple_values",
|
"simple_values",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Roles: &[]postgresflex.UserRole{
|
Roles: []postgresflex.UserRole{
|
||||||
"role_1",
|
"role_1",
|
||||||
"role_2",
|
"role_2",
|
||||||
"",
|
"",
|
||||||
},
|
},
|
||||||
Name: utils.Ptr("username"),
|
Name: "username",
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
|
|
@ -325,7 +326,7 @@ func TestMapFields(t *testing.T) {
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
Region: types.StringValue(testRegion),
|
Region: types.StringValue(testRegion),
|
||||||
Status: types.StringNull(),
|
Status: types.StringValue(""),
|
||||||
//ConnectionString: types.StringNull(),
|
//ConnectionString: types.StringNull(),
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
@ -333,8 +334,8 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
"null_fields_and_int_conversions",
|
"null_fields_and_int_conversions",
|
||||||
&postgresflex.GetUserResponse{
|
&postgresflex.GetUserResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: int32(1),
|
||||||
Name: nil,
|
Name: "",
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
|
|
@ -342,10 +343,10 @@ func TestMapFields(t *testing.T) {
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
Name: types.StringNull(),
|
Name: types.StringValue(""),
|
||||||
Roles: types.List(types.SetNull(types.StringType)),
|
Roles: types.List(types.SetNull(types.StringType)),
|
||||||
Region: types.StringValue(testRegion),
|
Region: types.StringValue(testRegion),
|
||||||
Status: types.StringNull(),
|
Status: types.StringValue(""),
|
||||||
//ConnectionString: types.StringNull(),
|
//ConnectionString: types.StringNull(),
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
|
|
@ -401,17 +402,17 @@ func TestToCreatePayload(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
description string
|
description string
|
||||||
input *resourceModel
|
input *resourceModel
|
||||||
inputRoles *[]string
|
inputRoles []string
|
||||||
expected *postgresflex.CreateUserRequestPayload
|
expected *postgresflex.CreateUserRequestPayload
|
||||||
isValid bool
|
isValid bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
"default_values",
|
"default_values",
|
||||||
&resourceModel{},
|
&resourceModel{},
|
||||||
&[]string{},
|
[]string{},
|
||||||
&postgresflex.CreateUserRequestPayload{
|
&postgresflex.CreateUserRequestPayload{
|
||||||
Name: nil,
|
Name: "",
|
||||||
Roles: &[]postgresflex.UserRole{},
|
Roles: []postgresflex.UserRole{},
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
|
|
@ -420,13 +421,13 @@ func TestToCreatePayload(t *testing.T) {
|
||||||
&resourceModel{
|
&resourceModel{
|
||||||
Name: types.StringValue("username"),
|
Name: types.StringValue("username"),
|
||||||
},
|
},
|
||||||
&[]string{
|
[]string{
|
||||||
"role_1",
|
"role_1",
|
||||||
"role_2",
|
"role_2",
|
||||||
},
|
},
|
||||||
&postgresflex.CreateUserRequestPayload{
|
&postgresflex.CreateUserRequestPayload{
|
||||||
Name: utils.Ptr("username"),
|
Name: "username",
|
||||||
Roles: &[]postgresflex.UserRole{
|
Roles: []postgresflex.UserRole{
|
||||||
"role_1",
|
"role_1",
|
||||||
"role_2",
|
"role_2",
|
||||||
},
|
},
|
||||||
|
|
@ -438,21 +439,21 @@ func TestToCreatePayload(t *testing.T) {
|
||||||
&resourceModel{
|
&resourceModel{
|
||||||
Name: types.StringNull(),
|
Name: types.StringNull(),
|
||||||
},
|
},
|
||||||
&[]string{
|
[]string{
|
||||||
"",
|
"",
|
||||||
},
|
},
|
||||||
&postgresflex.CreateUserRequestPayload{
|
&postgresflex.CreateUserRequestPayload{
|
||||||
Roles: &[]postgresflex.UserRole{
|
Roles: []postgresflex.UserRole{
|
||||||
"",
|
"",
|
||||||
},
|
},
|
||||||
Name: nil,
|
Name: "",
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"nil_model",
|
"nil_model",
|
||||||
nil,
|
nil,
|
||||||
&[]string{},
|
[]string{},
|
||||||
nil,
|
nil,
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
|
|
@ -489,16 +490,16 @@ func TestToUpdatePayload(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
description string
|
description string
|
||||||
input *resourceModel
|
input *resourceModel
|
||||||
inputRoles *[]string
|
inputRoles []string
|
||||||
expected *postgresflex.UpdateUserRequestPayload
|
expected *postgresflex.UpdateUserRequestPayload
|
||||||
isValid bool
|
isValid bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
"default_values",
|
"default_values",
|
||||||
&resourceModel{},
|
&resourceModel{},
|
||||||
&[]string{},
|
[]string{},
|
||||||
&postgresflex.UpdateUserRequestPayload{
|
&postgresflex.UpdateUserRequestPayload{
|
||||||
Roles: &[]postgresflex.UserRole{},
|
Roles: []postgresflex.UserRole{},
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
},
|
},
|
||||||
|
|
@ -507,13 +508,13 @@ func TestToUpdatePayload(t *testing.T) {
|
||||||
&resourceModel{
|
&resourceModel{
|
||||||
Name: types.StringValue("username"),
|
Name: types.StringValue("username"),
|
||||||
},
|
},
|
||||||
&[]string{
|
[]string{
|
||||||
"role_1",
|
"role_1",
|
||||||
"role_2",
|
"role_2",
|
||||||
},
|
},
|
||||||
&postgresflex.UpdateUserRequestPayload{
|
&postgresflex.UpdateUserRequestPayload{
|
||||||
Name: utils.Ptr("username"),
|
Name: utils.Ptr("username"),
|
||||||
Roles: &[]postgresflex.UserRole{
|
Roles: []postgresflex.UserRole{
|
||||||
"role_1",
|
"role_1",
|
||||||
"role_2",
|
"role_2",
|
||||||
},
|
},
|
||||||
|
|
@ -525,11 +526,11 @@ func TestToUpdatePayload(t *testing.T) {
|
||||||
&resourceModel{
|
&resourceModel{
|
||||||
Name: types.StringNull(),
|
Name: types.StringNull(),
|
||||||
},
|
},
|
||||||
&[]string{
|
[]string{
|
||||||
"",
|
"",
|
||||||
},
|
},
|
||||||
&postgresflex.UpdateUserRequestPayload{
|
&postgresflex.UpdateUserRequestPayload{
|
||||||
Roles: &[]postgresflex.UserRole{
|
Roles: []postgresflex.UserRole{
|
||||||
"",
|
"",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
@ -538,7 +539,7 @@ func TestToUpdatePayload(t *testing.T) {
|
||||||
{
|
{
|
||||||
"nil_model",
|
"nil_model",
|
||||||
nil,
|
nil,
|
||||||
&[]string{},
|
[]string{},
|
||||||
nil,
|
nil,
|
||||||
false,
|
false,
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -12,8 +12,8 @@ import (
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/diag"
|
"github.com/hashicorp/terraform-plugin-framework/diag"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
|
||||||
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
|
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/user/resources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
postgresflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha"
|
||||||
|
|
@ -60,7 +60,7 @@ type UserResourceIdentityModel struct {
|
||||||
|
|
||||||
// userResource implements the resource handling for a PostgreSQL Flex user.
|
// userResource implements the resource handling for a PostgreSQL Flex user.
|
||||||
type userResource struct {
|
type userResource struct {
|
||||||
client *postgresflex.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -189,8 +189,8 @@ func (r *userResource) Create(
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
arg := &clientArg{
|
arg := &clientArg{
|
||||||
projectId: model.ProjectId.ValueString(),
|
projectID: model.ProjectId.ValueString(),
|
||||||
instanceId: model.InstanceId.ValueString(),
|
instanceID: model.InstanceId.ValueString(),
|
||||||
region: r.providerData.GetRegionWithOverride(model.Region),
|
region: r.providerData.GetRegionWithOverride(model.Region),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -202,18 +202,18 @@ func (r *userResource) Create(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate API request body from model
|
// Generate API request body from model
|
||||||
payload, err := toCreatePayload(&model, &roles)
|
payload, err := toCreatePayload(&model, roles)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Creating API payload: %v", err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create new user
|
// Create new user
|
||||||
userResp, err := r.client.CreateUserRequest(
|
userResp, err := r.client.DefaultAPI.CreateUserRequest(
|
||||||
ctx,
|
ctx,
|
||||||
arg.projectId,
|
arg.projectID,
|
||||||
arg.region,
|
arg.region,
|
||||||
arg.instanceId,
|
arg.instanceID,
|
||||||
).CreateUserRequestPayload(*payload).Execute()
|
).CreateUserRequestPayload(*payload).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error creating user", fmt.Sprintf("Calling API: %v", err))
|
||||||
|
|
@ -221,7 +221,7 @@ func (r *userResource) Create(
|
||||||
}
|
}
|
||||||
|
|
||||||
id, ok := userResp.GetIdOk()
|
id, ok := userResp.GetIdOk()
|
||||||
if !ok || id == 0 {
|
if !ok || *id == 0 {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -230,7 +230,7 @@ func (r *userResource) Create(
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
arg.userId = id
|
arg.userID = int64(*id)
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "user_id", id)
|
ctx = tflog.SetField(ctx, "user_id", id)
|
||||||
|
|
||||||
|
|
@ -238,28 +238,28 @@ func (r *userResource) Create(
|
||||||
|
|
||||||
// Set data returned by API in identity
|
// Set data returned by API in identity
|
||||||
identity := UserResourceIdentityModel{
|
identity := UserResourceIdentityModel{
|
||||||
ProjectID: types.StringValue(arg.projectId),
|
ProjectID: types.StringValue(arg.projectID),
|
||||||
Region: types.StringValue(arg.region),
|
Region: types.StringValue(arg.region),
|
||||||
InstanceID: types.StringValue(arg.instanceId),
|
InstanceID: types.StringValue(arg.instanceID),
|
||||||
UserID: types.Int64Value(id),
|
UserID: types.Int64Value(int64(*id)),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(id)
|
model.Id = types.Int64Value(int64(*id))
|
||||||
model.UserId = types.Int64Value(id)
|
model.UserId = types.Int64Value(int64(*id))
|
||||||
model.Password = types.StringValue(userResp.GetPassword())
|
model.Password = types.StringValue(userResp.GetPassword())
|
||||||
model.Status = types.StringValue(userResp.GetStatus())
|
model.Status = types.StringValue(userResp.GetStatus())
|
||||||
|
|
||||||
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
||||||
ctx,
|
ctx,
|
||||||
r.client,
|
r.client.DefaultAPI,
|
||||||
arg.projectId,
|
arg.projectID,
|
||||||
arg.instanceId,
|
arg.instanceID,
|
||||||
arg.region,
|
arg.region,
|
||||||
id,
|
int64(*id),
|
||||||
).SetSleepBeforeWait(
|
).SetSleepBeforeWait(
|
||||||
10 * time.Second,
|
10 * time.Second,
|
||||||
).SetTimeout(
|
).SetTimeout(
|
||||||
|
|
@ -276,7 +276,7 @@ func (r *userResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if waitResp.Id == nil {
|
if waitResp.Id == 0 {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -285,7 +285,7 @@ func (r *userResource) Create(
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if waitResp.Id == nil || *waitResp.Id != id {
|
if waitResp.Id != *id {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -324,8 +324,8 @@ func (r *userResource) Read(
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
arg := &clientArg{
|
arg := &clientArg{
|
||||||
projectId: model.ProjectId.ValueString(),
|
projectID: model.ProjectId.ValueString(),
|
||||||
instanceId: model.InstanceId.ValueString(),
|
instanceID: model.InstanceId.ValueString(),
|
||||||
region: r.providerData.GetRegionWithOverride(model.Region),
|
region: r.providerData.GetRegionWithOverride(model.Region),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -336,9 +336,9 @@ func (r *userResource) Read(
|
||||||
// Read resource state
|
// Read resource state
|
||||||
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
||||||
ctx,
|
ctx,
|
||||||
r.client,
|
r.client.DefaultAPI,
|
||||||
arg.projectId,
|
arg.projectID,
|
||||||
arg.instanceId,
|
arg.instanceID,
|
||||||
arg.region,
|
arg.region,
|
||||||
model.UserId.ValueInt64(),
|
model.UserId.ValueInt64(),
|
||||||
).SetSleepBeforeWait(
|
).SetSleepBeforeWait(
|
||||||
|
|
@ -357,7 +357,7 @@ func (r *userResource) Read(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
|
if int64(waitResp.Id) != model.UserId.ValueInt64() {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -366,16 +366,16 @@ func (r *userResource) Read(
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
arg.userId = *waitResp.Id
|
arg.userID = int64(waitResp.Id)
|
||||||
|
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
// Set data returned by API in identity
|
// Set data returned by API in identity
|
||||||
identity := UserResourceIdentityModel{
|
identity := UserResourceIdentityModel{
|
||||||
ProjectID: types.StringValue(arg.projectId),
|
ProjectID: types.StringValue(arg.projectID),
|
||||||
Region: types.StringValue(arg.region),
|
Region: types.StringValue(arg.region),
|
||||||
InstanceID: types.StringValue(arg.instanceId),
|
InstanceID: types.StringValue(arg.instanceID),
|
||||||
UserID: types.Int64Value(arg.userId),
|
UserID: types.Int64Value(arg.userID),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -407,8 +407,8 @@ func (r *userResource) Update(
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
arg := &clientArg{
|
arg := &clientArg{
|
||||||
projectId: model.ProjectId.ValueString(),
|
projectID: model.ProjectId.ValueString(),
|
||||||
instanceId: model.InstanceId.ValueString(),
|
instanceID: model.InstanceId.ValueString(),
|
||||||
region: r.providerData.GetRegionWithOverride(model.Region),
|
region: r.providerData.GetRegionWithOverride(model.Region),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -429,26 +429,26 @@ func (r *userResource) Update(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate API request body from model
|
// Generate API request body from model
|
||||||
payload, err := toUpdatePayload(&model, &roles)
|
payload, err := toUpdatePayload(&model, roles)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", fmt.Sprintf("Updating API payload: %v", err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
userId64 := arg.userId
|
userID64 := arg.userID
|
||||||
if userId64 > math.MaxInt32 {
|
if userID64 > math.MaxInt32 {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
userId := int32(userId64) // nolint:gosec // check is performed above
|
userID := int32(userID64) // nolint:gosec // check is performed above
|
||||||
|
|
||||||
// Update existing instance
|
// Update existing instance
|
||||||
err = r.client.UpdateUserRequest(
|
err = r.client.DefaultAPI.UpdateUserRequest(
|
||||||
ctx,
|
ctx,
|
||||||
arg.projectId,
|
arg.projectID,
|
||||||
arg.region,
|
arg.region,
|
||||||
arg.instanceId,
|
arg.instanceID,
|
||||||
userId,
|
userID,
|
||||||
).UpdateUserRequestPayload(*payload).Execute()
|
).UpdateUserRequestPayload(*payload).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", err.Error())
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", err.Error())
|
||||||
|
|
@ -459,10 +459,10 @@ func (r *userResource) Update(
|
||||||
|
|
||||||
// Set data returned by API in identity
|
// Set data returned by API in identity
|
||||||
identity := UserResourceIdentityModel{
|
identity := UserResourceIdentityModel{
|
||||||
ProjectID: types.StringValue(arg.projectId),
|
ProjectID: types.StringValue(arg.projectID),
|
||||||
Region: types.StringValue(arg.region),
|
Region: types.StringValue(arg.region),
|
||||||
InstanceID: types.StringValue(arg.instanceId),
|
InstanceID: types.StringValue(arg.instanceID),
|
||||||
UserID: types.Int64Value(userId64),
|
UserID: types.Int64Value(userID64),
|
||||||
}
|
}
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -472,9 +472,9 @@ func (r *userResource) Update(
|
||||||
// Verify update
|
// Verify update
|
||||||
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
||||||
ctx,
|
ctx,
|
||||||
r.client,
|
r.client.DefaultAPI,
|
||||||
arg.projectId,
|
arg.projectID,
|
||||||
arg.instanceId,
|
arg.instanceID,
|
||||||
arg.region,
|
arg.region,
|
||||||
model.UserId.ValueInt64(),
|
model.UserId.ValueInt64(),
|
||||||
).SetSleepBeforeWait(
|
).SetSleepBeforeWait(
|
||||||
|
|
@ -493,7 +493,7 @@ func (r *userResource) Update(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if waitResp.Id == nil || *waitResp.Id != model.UserId.ValueInt64() {
|
if int64(waitResp.Id) != model.UserId.ValueInt64() {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -502,7 +502,7 @@ func (r *userResource) Update(
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
arg.userId = *waitResp.Id
|
arg.userID = int64(waitResp.Id)
|
||||||
|
|
||||||
// Set state to fully populated data
|
// Set state to fully populated data
|
||||||
diags = resp.State.Set(ctx, stateModel)
|
diags = resp.State.Set(ctx, stateModel)
|
||||||
|
|
@ -547,15 +547,15 @@ func (r *userResource) Delete(
|
||||||
ctx = r.setTFLogFields(ctx, arg)
|
ctx = r.setTFLogFields(ctx, arg)
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
userId64 := arg.userId
|
userID64 := arg.userID
|
||||||
if userId64 > math.MaxInt32 {
|
if userID64 > math.MaxInt32 {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (userId)")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
userId := int32(userId64) // nolint:gosec // check is performed above
|
userID := int32(userID64) // nolint:gosec // check is performed above
|
||||||
|
|
||||||
// Delete existing record set
|
// Delete existing record set
|
||||||
err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
|
err := r.client.DefaultAPI.DeleteUserRequest(ctx, arg.projectID, arg.region, arg.instanceID, userID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting user", fmt.Sprintf("Calling API: %v", err))
|
||||||
}
|
}
|
||||||
|
|
@ -571,7 +571,7 @@ func (r *userResource) Delete(
|
||||||
// if exists {
|
// if exists {
|
||||||
// core.LogAndAddError(
|
// core.LogAndAddError(
|
||||||
// ctx, &resp.Diagnostics, "Error deleting user",
|
// ctx, &resp.Diagnostics, "Error deleting user",
|
||||||
// fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt64()),
|
// fmt.Sprintf("User ID '%v' resource still exists after deletion", model.UserId.ValueInt32()),
|
||||||
// )
|
// )
|
||||||
// return
|
// return
|
||||||
//}
|
//}
|
||||||
|
|
@ -607,10 +607,10 @@ func (r *userResource) IdentitySchema(
|
||||||
|
|
||||||
// clientArg holds the arguments for API calls.
|
// clientArg holds the arguments for API calls.
|
||||||
type clientArg struct {
|
type clientArg struct {
|
||||||
projectId string
|
projectID string
|
||||||
instanceId string
|
instanceID string
|
||||||
region string
|
region string
|
||||||
userId int64
|
userID int64
|
||||||
}
|
}
|
||||||
|
|
||||||
// ImportState imports a resource into the Terraform state on success.
|
// ImportState imports a resource into the Terraform state on success.
|
||||||
|
|
@ -637,7 +637,7 @@ func (r *userResource) ImportState(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
userId, err := strconv.ParseInt(idParts[3], 10, 64)
|
userID, err := strconv.ParseInt(idParts[3], 10, 64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
|
|
@ -651,7 +651,7 @@ func (r *userResource) ImportState(
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...)
|
||||||
|
|
||||||
tflog.Info(ctx, "Postgres Flex user state imported")
|
tflog.Info(ctx, "Postgres Flex user state imported")
|
||||||
|
|
||||||
|
|
@ -665,15 +665,15 @@ func (r *userResource) ImportState(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
projectId := identityData.ProjectID.ValueString()
|
projectID := identityData.ProjectID.ValueString()
|
||||||
region := identityData.Region.ValueString()
|
region := identityData.Region.ValueString()
|
||||||
instanceId := identityData.InstanceID.ValueString()
|
instanceID := identityData.InstanceID.ValueString()
|
||||||
userId := identityData.UserID.ValueInt64()
|
userID := identityData.UserID.ValueInt64()
|
||||||
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectID)...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceID)...)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userId)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("user_id"), userID)...)
|
||||||
|
|
||||||
tflog.Info(ctx, "Postgres Flex user state imported")
|
tflog.Info(ctx, "Postgres Flex user state imported")
|
||||||
}
|
}
|
||||||
|
|
@ -683,25 +683,24 @@ func (r *userResource) extractIdentityData(
|
||||||
model resourceModel,
|
model resourceModel,
|
||||||
identity UserResourceIdentityModel,
|
identity UserResourceIdentityModel,
|
||||||
) (*clientArg, error) {
|
) (*clientArg, error) {
|
||||||
var projectId, region, instanceId string
|
var projectID, region, instanceID string
|
||||||
var userId int64
|
var userID int64
|
||||||
|
|
||||||
if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
|
if !model.UserId.IsNull() && !model.UserId.IsUnknown() {
|
||||||
userId = model.UserId.ValueInt64()
|
userID = model.UserId.ValueInt64()
|
||||||
} else {
|
} else {
|
||||||
if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
|
if identity.UserID.IsNull() || identity.UserID.IsUnknown() {
|
||||||
return nil, fmt.Errorf("user_id not found in config")
|
return nil, fmt.Errorf("user_id not found in config")
|
||||||
}
|
}
|
||||||
userId = identity.UserID.ValueInt64()
|
userID = identity.UserID.ValueInt64()
|
||||||
}
|
}
|
||||||
|
|
||||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||||
projectId = model.ProjectId.ValueString()
|
projectID = model.ProjectId.ValueString()
|
||||||
} else {
|
} else {
|
||||||
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
|
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
|
||||||
return nil, fmt.Errorf("project_id not found in config")
|
return nil, fmt.Errorf("project_id not found in config")
|
||||||
}
|
}
|
||||||
projectId = identity.ProjectID.ValueString()
|
projectID = identity.ProjectID.ValueString()
|
||||||
}
|
}
|
||||||
|
|
||||||
if !model.Region.IsNull() && !model.Region.IsUnknown() {
|
if !model.Region.IsNull() && !model.Region.IsUnknown() {
|
||||||
|
|
@ -714,27 +713,27 @@ func (r *userResource) extractIdentityData(
|
||||||
}
|
}
|
||||||
|
|
||||||
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
||||||
instanceId = model.InstanceId.ValueString()
|
instanceID = model.InstanceId.ValueString()
|
||||||
} else {
|
} else {
|
||||||
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
|
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
|
||||||
return nil, fmt.Errorf("instance_id not found in config")
|
return nil, fmt.Errorf("instance_id not found in config")
|
||||||
}
|
}
|
||||||
instanceId = identity.InstanceID.ValueString()
|
instanceID = identity.InstanceID.ValueString()
|
||||||
}
|
}
|
||||||
return &clientArg{
|
return &clientArg{
|
||||||
projectId: projectId,
|
projectID: projectID,
|
||||||
instanceId: instanceId,
|
instanceID: instanceID,
|
||||||
region: region,
|
region: region,
|
||||||
userId: userId,
|
userID: userID,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// setTFLogFields adds relevant fields to the context for terraform logging purposes.
|
// setTFLogFields adds relevant fields to the context for terraform logging purposes.
|
||||||
func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context {
|
func (r *userResource) setTFLogFields(ctx context.Context, arg *clientArg) context.Context {
|
||||||
ctx = tflog.SetField(ctx, "project_id", arg.projectId)
|
ctx = tflog.SetField(ctx, "project_id", arg.projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", arg.instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", arg.instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", arg.region)
|
ctx = tflog.SetField(ctx, "region", arg.region)
|
||||||
ctx = tflog.SetField(ctx, "user_id", arg.userId)
|
ctx = tflog.SetField(ctx, "user_id", arg.userID)
|
||||||
|
|
||||||
return ctx
|
return ctx
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -9,7 +9,7 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/diag"
|
"github.com/hashicorp/terraform-plugin-framework/diag"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
"github.com/stackitcloud/stackit-sdk-go/core/config"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
|
||||||
|
|
@ -15,7 +15,7 @@ import (
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
||||||
postgresflex "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
|
@ -38,7 +38,7 @@ func TestConfigureClient(t *testing.T) {
|
||||||
name string
|
name string
|
||||||
args args
|
args args
|
||||||
wantErr bool
|
wantErr bool
|
||||||
expected *postgresflex.APIClient
|
expected *v3alpha1api.APIClient
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "default endpoint",
|
name: "default endpoint",
|
||||||
|
|
@ -47,8 +47,8 @@ func TestConfigureClient(t *testing.T) {
|
||||||
Version: testVersion,
|
Version: testVersion,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: func() *postgresflex.APIClient {
|
expected: func() *v3alpha1api.APIClient {
|
||||||
apiClient, err := postgresflex.NewAPIClient(
|
apiClient, err := v3alpha1api.NewAPIClient(
|
||||||
config.WithRegion("eu01"),
|
config.WithRegion("eu01"),
|
||||||
utils.UserAgentConfigOption(testVersion),
|
utils.UserAgentConfigOption(testVersion),
|
||||||
)
|
)
|
||||||
|
|
@ -67,8 +67,8 @@ func TestConfigureClient(t *testing.T) {
|
||||||
PostgresFlexCustomEndpoint: testCustomEndpoint,
|
PostgresFlexCustomEndpoint: testCustomEndpoint,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expected: func() *postgresflex.APIClient {
|
expected: func() *v3alpha1api.APIClient {
|
||||||
apiClient, err := postgresflex.NewAPIClient(
|
apiClient, err := v3alpha1api.NewAPIClient(
|
||||||
utils.UserAgentConfigOption(testVersion),
|
utils.UserAgentConfigOption(testVersion),
|
||||||
config.WithEndpoint(testCustomEndpoint),
|
config.WithEndpoint(testCustomEndpoint),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -16,7 +16,8 @@ import (
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
||||||
sqlserverflexalphaPkg "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
|
sqlserverflexalphaPkg "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
|
||||||
|
|
||||||
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
|
sqlserverflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -119,7 +120,7 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
|
||||||
|
|
||||||
databaseName := data.DatabaseName.ValueString()
|
databaseName := data.DatabaseName.ValueString()
|
||||||
|
|
||||||
databaseResp, err := d.client.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
|
databaseResp, err := d.client.DefaultAPI.GetDatabaseRequest(ctx, projectId, region, instanceId, databaseName).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
|
handleReadError(ctx, &resp.Diagnostics, err, projectId, instanceId)
|
||||||
resp.State.RemoveResource(ctx)
|
resp.State.RemoveResource(ctx)
|
||||||
|
|
@ -142,7 +143,7 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
|
||||||
// Save data into Terraform state
|
// Save data into Terraform state
|
||||||
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
||||||
|
|
||||||
tflog.Info(ctx, "SQL Server Flex beta database read")
|
tflog.Info(ctx, "SQL Server Flex Alpha database read")
|
||||||
}
|
}
|
||||||
|
|
||||||
// handleReadError centralizes API error handling for the Read operation.
|
// handleReadError centralizes API error handling for the Read operation.
|
||||||
|
|
|
||||||
|
|
@ -5,8 +5,10 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
|
||||||
|
sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -15,7 +17,7 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource
|
||||||
if source == nil {
|
if source == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
if source.Id == nil || *source.Id == 0 {
|
if source.Id == 0 {
|
||||||
return fmt.Errorf("id not present")
|
return fmt.Errorf("id not present")
|
||||||
}
|
}
|
||||||
if model == nil {
|
if model == nil {
|
||||||
|
|
@ -25,8 +27,8 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource
|
||||||
var databaseId int64
|
var databaseId int64
|
||||||
if model.Id.ValueInt64() != 0 {
|
if model.Id.ValueInt64() != 0 {
|
||||||
databaseId = model.Id.ValueInt64()
|
databaseId = model.Id.ValueInt64()
|
||||||
} else if source.Id != nil {
|
} else if source.Id != 0 {
|
||||||
databaseId = *source.Id
|
databaseId = source.Id
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
@ -38,7 +40,7 @@ func mapFields(source *sqlserverflexalpha.GetDatabaseResponse, model *dataSource
|
||||||
model.Region = types.StringValue(region)
|
model.Region = types.StringValue(region)
|
||||||
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
||||||
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
||||||
model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
|
model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel()))
|
||||||
model.CollationName = types.StringValue(source.GetCollationName())
|
model.CollationName = types.StringValue(source.GetCollationName())
|
||||||
|
|
||||||
model.TerraformId = utils.BuildInternalTerraformId(
|
model.TerraformId = utils.BuildInternalTerraformId(
|
||||||
|
|
@ -56,7 +58,7 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re
|
||||||
if source == nil {
|
if source == nil {
|
||||||
return fmt.Errorf("response is nil")
|
return fmt.Errorf("response is nil")
|
||||||
}
|
}
|
||||||
if source.Id == nil || *source.Id == 0 {
|
if source.Id == 0 {
|
||||||
return fmt.Errorf("id not present")
|
return fmt.Errorf("id not present")
|
||||||
}
|
}
|
||||||
if model == nil {
|
if model == nil {
|
||||||
|
|
@ -66,8 +68,8 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re
|
||||||
var databaseId int64
|
var databaseId int64
|
||||||
if model.Id.ValueInt64() != 0 {
|
if model.Id.ValueInt64() != 0 {
|
||||||
databaseId = model.Id.ValueInt64()
|
databaseId = model.Id.ValueInt64()
|
||||||
} else if source.Id != nil {
|
} else if source.Id != 0 {
|
||||||
databaseId = *source.Id
|
databaseId = source.Id
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
@ -80,8 +82,8 @@ func mapResourceFields(source *sqlserverflexalpha.GetDatabaseResponse, model *re
|
||||||
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
model.ProjectId = types.StringValue(model.ProjectId.ValueString())
|
||||||
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
model.InstanceId = types.StringValue(model.InstanceId.ValueString())
|
||||||
|
|
||||||
model.Compatibility = types.Int64Value(source.GetCompatibilityLevel())
|
model.Compatibility = types.Int64Value(int64(source.GetCompatibilityLevel()))
|
||||||
model.CompatibilityLevel = types.Int64Value(source.GetCompatibilityLevel())
|
model.CompatibilityLevel = types.Int64Value(int64(source.GetCompatibilityLevel()))
|
||||||
|
|
||||||
model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
|
model.Collation = types.StringValue(source.GetCollationName()) // it does not come back from api
|
||||||
model.CollationName = types.StringValue(source.GetCollationName())
|
model.CollationName = types.StringValue(source.GetCollationName())
|
||||||
|
|
@ -96,9 +98,9 @@ func toCreatePayload(model *resourceModel) (*sqlserverflexalpha.CreateDatabaseRe
|
||||||
}
|
}
|
||||||
|
|
||||||
return &sqlserverflexalpha.CreateDatabaseRequestPayload{
|
return &sqlserverflexalpha.CreateDatabaseRequestPayload{
|
||||||
Name: model.Name.ValueStringPointer(),
|
Name: model.Name.ValueString(),
|
||||||
Owner: model.Owner.ValueStringPointer(),
|
Owner: model.Owner.ValueString(),
|
||||||
Collation: model.Collation.ValueStringPointer(),
|
Collation: model.Collation.ValueStringPointer(),
|
||||||
Compatibility: model.Compatibility.ValueInt64Pointer(),
|
Compatibility: coreUtils.Ptr(int32(model.Compatibility.ValueInt64())), //nolint:gosec // TODO
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -6,8 +6,8 @@ import (
|
||||||
"github.com/google/go-cmp/cmp"
|
"github.com/google/go-cmp/cmp"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
sqlserverflexalpha "github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
|
|
||||||
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
|
datasource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/database/datasources_gen"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -31,11 +31,11 @@ func TestMapFields(t *testing.T) {
|
||||||
name: "should map fields correctly",
|
name: "should map fields correctly",
|
||||||
given: given{
|
given: given{
|
||||||
source: &sqlserverflexalpha.GetDatabaseResponse{
|
source: &sqlserverflexalpha.GetDatabaseResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: (int64(1)),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: ("my-db"),
|
||||||
CollationName: utils.Ptr("collation"),
|
CollationName: ("collation"),
|
||||||
CompatibilityLevel: utils.Ptr(int64(150)),
|
CompatibilityLevel: (int32(150)),
|
||||||
Owner: utils.Ptr("my-owner"),
|
Owner: ("my-owner"),
|
||||||
},
|
},
|
||||||
model: &dataSourceModel{
|
model: &dataSourceModel{
|
||||||
DatabaseModel: datasource.DatabaseModel{
|
DatabaseModel: datasource.DatabaseModel{
|
||||||
|
|
@ -73,7 +73,7 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
name: "should fail on nil source ID",
|
name: "should fail on nil source ID",
|
||||||
given: given{
|
given: given{
|
||||||
source: &sqlserverflexalpha.GetDatabaseResponse{Id: nil},
|
source: &sqlserverflexalpha.GetDatabaseResponse{Id: 0},
|
||||||
model: &dataSourceModel{},
|
model: &dataSourceModel{},
|
||||||
},
|
},
|
||||||
expected: expected{err: true},
|
expected: expected{err: true},
|
||||||
|
|
@ -81,7 +81,7 @@ func TestMapFields(t *testing.T) {
|
||||||
{
|
{
|
||||||
name: "should fail on nil model",
|
name: "should fail on nil model",
|
||||||
given: given{
|
given: given{
|
||||||
source: &sqlserverflexalpha.GetDatabaseResponse{Id: utils.Ptr(int64(1))},
|
source: &sqlserverflexalpha.GetDatabaseResponse{Id: (int64(1))},
|
||||||
model: nil,
|
model: nil,
|
||||||
},
|
},
|
||||||
expected: expected{err: true},
|
expected: expected{err: true},
|
||||||
|
|
@ -125,9 +125,9 @@ func TestMapResourceFields(t *testing.T) {
|
||||||
name: "should map fields correctly",
|
name: "should map fields correctly",
|
||||||
given: given{
|
given: given{
|
||||||
source: &sqlserverflexalpha.GetDatabaseResponse{
|
source: &sqlserverflexalpha.GetDatabaseResponse{
|
||||||
Id: utils.Ptr(int64(1)),
|
Id: (int64(1)),
|
||||||
Name: utils.Ptr("my-db"),
|
Name: ("my-db"),
|
||||||
Owner: utils.Ptr("my-owner"),
|
Owner: ("my-owner"),
|
||||||
},
|
},
|
||||||
model: &resourceModel{
|
model: &resourceModel{
|
||||||
ProjectId: types.StringValue("my-project"),
|
ProjectId: types.StringValue("my-project"),
|
||||||
|
|
@ -202,8 +202,9 @@ func TestToCreatePayload(t *testing.T) {
|
||||||
},
|
},
|
||||||
expected: expected{
|
expected: expected{
|
||||||
payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{
|
payload: &sqlserverflexalpha.CreateDatabaseRequestPayload{
|
||||||
Name: utils.Ptr("my-db"),
|
Name: "my-db",
|
||||||
Owner: utils.Ptr("my-owner"),
|
Owner: "my-owner",
|
||||||
|
Compatibility: utils.Ptr(int32(0)),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue