Compare commits
180 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 6b5b97ac9a | |||
|
|
29cf260d56 | ||
| 4e217e20a4 | |||
|
|
2f2b5cd04a | ||
| 6c26089930 | |||
| c31a8a787a | |||
| 515b7b1357 | |||
| e6d2d3e10a | |||
| 8fee76f037 | |||
| ebb3ec051d | |||
|
|
e5b2681142 | ||
| e65c74e6e7 | |||
| afde034d64 | |||
|
|
7b911ebf70 | ||
| e320e80956 | |||
|
|
4d96cc8951 | ||
| f9a3d4265c | |||
|
|
1298c3fbf1 | ||
| fa641d29c5 | |||
|
|
dab47d4690 | ||
| a0351798e0 | |||
| f7b022ec6c | |||
|
|
6e3d2b51fa | ||
|
|
9ef7ce9029 | ||
| aac1a2ba17 | |||
| d16b97a8e9 | |||
|
|
edc5e869ce | ||
| 7b2dfaea44 | |||
| 2893a11c0a | |||
| 0b5e15d1c5 | |||
|
|
8a6d932107 | ||
| 956cedef08 | |||
|
|
d5b4e02437 | ||
|
|
bb15293dd3 | ||
|
|
6e6e771631 | ||
|
|
683bc64c12 | ||
|
|
064d2cf1db | ||
|
|
722c46b12d | ||
| 03776cc7fd | |||
| 9060aa9f6a | |||
|
|
5a1dc9cd94 | ||
| d34927537a | |||
|
|
83b0860115 | ||
|
|
97645d7a66 | ||
|
|
94f3dad963 | ||
|
|
c081f5f7bd | ||
|
|
8ab8656b5c | ||
|
|
63435e5e63 | ||
|
|
e974ccf906 | ||
|
|
1fd2df5c76 | ||
|
|
aaabadde1a | ||
|
|
0a5bc30d9c | ||
|
|
8ff6b2a4d8 | ||
| 3dbb04c917 | |||
| a1d1750a79 | |||
| 76af35f27d | |||
| 01deb9022d | |||
|
|
91fe23f5e8 | ||
|
|
4913ff1c3a | ||
| dd77da71dd | |||
|
|
431f6eff8c | ||
|
|
a310d1454a | ||
| 3790894563 | |||
| f173fd54fe | |||
| 1033d7e034 | |||
|
|
635a9abf20 | ||
|
|
07458c5677 | ||
|
|
eb13630d2f | ||
| 4a2819787d | |||
|
|
36eccc52c3 | ||
|
|
841e702b95 | ||
|
|
aba831cbdd | ||
|
|
89a24ce780 | ||
|
|
f05e90c35a | ||
|
|
7ee82366d7 | ||
| d5644ec27f | |||
|
|
20e9b3ca4c | ||
|
|
43223f5d1f | ||
|
|
452f73877f | ||
|
|
55a0917a86 | ||
|
|
d90236b02e | ||
|
|
b1f8c8a4d9 | ||
|
|
e01ae1a920 | ||
|
|
843fc46f54 | ||
|
|
10af1dbbba | ||
|
|
459120d3b3 | ||
|
|
82c654f3ba | ||
|
|
0c9ecfc670 | ||
| 131e1700bb | |||
|
|
86fc98461c | ||
|
|
ed7ff0f58e | ||
|
|
f2bffa9ece | ||
| 399e8ccb0c | |||
|
|
e21fe64326 | ||
| 4991897eca | |||
|
|
b737875c68 | ||
|
|
9dbf36dd35 | ||
|
|
00a43dfb4c | ||
|
|
b63526b065 | ||
|
|
68e4c137f1 | ||
|
|
6e23dab949 | ||
| de019908d2 | |||
|
|
b1b359f436 | ||
|
|
32e41d8b44 | ||
|
|
c22e758b2c | ||
|
|
581e45eb9c | ||
|
|
4549ba63e5 | ||
|
|
80d1d12278 | ||
|
|
5e9051fb89 | ||
|
|
e7176b2eef | ||
|
|
2c0e8e874e | ||
|
|
3dbf79c95f | ||
|
|
a5a388f238 | ||
|
|
cd390b1dfc | ||
|
|
9242a9526c | ||
|
|
760bcfd658 | ||
|
|
024534da5f | ||
|
|
4e479a365b | ||
|
|
50184f4478 | ||
|
|
8532f09ce2 | ||
|
|
ee89243b3a | ||
|
|
70f7492043 | ||
|
|
4153035eae | ||
|
|
f3d66c7569 | ||
|
|
a9df5b0ff5 | ||
|
|
5ec2ab8c67 | ||
|
|
bb54206ba9 | ||
|
|
7fe135f337 | ||
|
|
0a21d9f0f2 | ||
|
|
37ba538a8e | ||
|
|
f986d919da | ||
|
|
5e303ba8dd | ||
|
|
3149537a4a | ||
|
|
9f41c4da7f | ||
| 979220be66 | |||
|
|
0150fea302 | ||
|
|
910551f09d | ||
|
|
e2f2cecdf7 | ||
|
|
468841db57 | ||
|
|
6b513231b3 | ||
|
|
b374a2b300 | ||
|
|
29f693308e | ||
|
|
1027643f95 | ||
|
|
1c0ba50668 | ||
|
|
9a18db49ad | ||
|
|
7d66568003 | ||
|
|
318d2e0962 | ||
|
|
5cab4b79ee | ||
|
|
81f422f944 | ||
|
|
a5ff1146a0 | ||
|
|
dac41ed65e | ||
|
|
8998ef0332 | ||
|
|
ff9f47edc3 | ||
|
|
5b6576da1c | ||
|
|
25fb4453f0 | ||
|
|
126e22414d | ||
|
|
3a24b9e930 | ||
|
|
76b61859da | ||
|
|
b08b32ef1d | ||
|
|
d01ae71b75 | ||
|
|
6aac685ed4 | ||
|
|
f0414ab3b9 | ||
|
|
9e04ab2630 | ||
|
|
99f2853ae5 | ||
|
|
c9193e0237 | ||
|
|
944a75295f | ||
|
|
db080dc89a | ||
|
|
70db08f43f | ||
|
|
08afbcf574 | ||
|
|
ce2f3fca00 | ||
|
|
75e003ae9a | ||
|
|
7de63b5b8e | ||
|
|
feef0b61d6 | ||
|
|
a861661036 | ||
|
|
5381516661 | ||
|
|
df25ceffd4 | ||
|
|
c07c81b091 | ||
|
|
3b07f3f1d3 | ||
|
|
48b1c99ede | ||
|
|
30070d8470 |
132 changed files with 6038 additions and 3947 deletions
140
.github/actions/acc_test/action.yaml
vendored
140
.github/actions/acc_test/action.yaml
vendored
|
|
@@ -2,14 +2,19 @@ name: Acceptance Testing
|
||||||
description: "Acceptance Testing pipeline"
|
description: "Acceptance Testing pipeline"
|
||||||
|
|
||||||
inputs:
|
inputs:
|
||||||
|
tf_debug:
|
||||||
|
description: "enable terraform debug logs"
|
||||||
|
default: 'false'
|
||||||
|
required: true
|
||||||
|
|
||||||
test_timeout_string:
|
test_timeout_string:
|
||||||
description: "string that determines the timeout (default: 45m)"
|
description: "string that determines the timeout (default: 45m)"
|
||||||
default: '45m'
|
default: '90m'
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
go-version:
|
go-version:
|
||||||
description: "go version to install"
|
description: "go version to install"
|
||||||
default: '1.25'
|
default: '1.26'
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
project_id:
|
project_id:
|
||||||
|
|
@@ -60,20 +65,18 @@ inputs:
|
||||||
description: "testfile to run"
|
description: "testfile to run"
|
||||||
default: ''
|
default: ''
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
result:
|
||||||
|
value: ${{ steps.testrun.outputs.result }}
|
||||||
|
description: "the output of the tests"
|
||||||
|
|
||||||
#outputs:
|
status:
|
||||||
# random-number:
|
value: ${{ steps.status.outputs.status }}
|
||||||
# description: "Random number"
|
description: "the status of the tests"
|
||||||
# value: ${{ steps.random-number-generator.outputs.random-number }}
|
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
# - name: Random Number Generator
|
|
||||||
# id: random-number-generator
|
|
||||||
# run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
|
|
||||||
# shell: bash
|
|
||||||
|
|
||||||
- name: Install needed tools
|
- name: Install needed tools
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|
@@ -89,6 +92,15 @@ runs:
|
||||||
fi
|
fi
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
# Install latest version of Terraform
|
||||||
|
- uses: hashicorp/setup-terraform@v4
|
||||||
|
with:
|
||||||
|
terraform_wrapper: false
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: '24.x'
|
||||||
|
|
||||||
- name: Setup JAVA
|
- name: Setup JAVA
|
||||||
uses: actions/setup-java@v5
|
uses: actions/setup-java@v5
|
||||||
with:
|
with:
|
||||||
|
|
@@ -141,6 +153,12 @@ runs:
|
||||||
${{ steps.goenv.outputs.gomodcache }}
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
key: ${{ runner.os }}-gopkg
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
- name: Define service account file path variable
|
||||||
|
id: service_account
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "safilepath=${PWD}/stackit/${{ inputs.service_account_json_file_path }}" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Creating service_account file from json input
|
- name: Creating service_account file from json input
|
||||||
if: inputs.service_account_json_content != ''
|
if: inputs.service_account_json_content != ''
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
@@ -185,61 +203,35 @@ runs:
|
||||||
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
|
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Run acceptance test file
|
|
||||||
if: ${{ inputs.test_file != '' }}
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "::group::go test file"
|
|
||||||
set -e
|
|
||||||
set -o pipefail
|
|
||||||
|
|
||||||
echo "Running acceptance tests for the terraform provider"
|
|
||||||
cd stackit || exit 1
|
|
||||||
TF_ACC=1 \
|
|
||||||
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
|
||||||
TF_ACC_REGION=${TF_ACC_REGION} \
|
|
||||||
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
|
|
||||||
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
|
|
||||||
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
|
|
||||||
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
|
||||||
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
|
||||||
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
|
||||||
go test ${{ inputs.test_file }} -count=1 -timeout=${{ inputs.test_timeout_string }}
|
|
||||||
echo "::endgroup::"
|
|
||||||
env:
|
|
||||||
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
|
||||||
TF_ACC_REGION: ${{ inputs.region }}
|
|
||||||
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
|
||||||
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
|
||||||
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
|
||||||
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
|
||||||
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
|
||||||
|
|
||||||
# - name: Run test action
|
|
||||||
# if: ${{ inputs.test_file == '' }}
|
|
||||||
# env:
|
|
||||||
# TF_ACC: 1
|
|
||||||
# TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
|
||||||
# TF_ACC_REGION: ${{ inputs.region }}
|
|
||||||
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
|
||||||
# TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
|
||||||
# TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
|
||||||
# TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
|
||||||
# TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
|
||||||
# TF_ACC_SERVICE_ACCOUNT_FILE: "${PWD}/${{ inputs.service_account_json_file_path }}"
|
|
||||||
# uses: robherley/go-test-action@v0.1.0
|
|
||||||
# with:
|
|
||||||
# testArguments: "./... -timeout 45m"
|
|
||||||
|
|
||||||
- name: Run acceptance tests
|
- name: Run acceptance tests
|
||||||
if: ${{ inputs.test_file == '' }}
|
id: testrun
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
echo "::group::go test all"
|
echo "::group::go test"
|
||||||
set -e
|
set -e
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
|
|
||||||
|
if [[ "${{ inputs.tf_debug }}" == "true" ]]; then
|
||||||
|
TF_LOG=INFO
|
||||||
|
export TF_LOG
|
||||||
|
fi
|
||||||
|
|
||||||
|
testfile="${{ inputs.test_file }}"
|
||||||
|
|
||||||
|
echo "result=no result before run" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
echo "Running acceptance tests for the terraform provider"
|
echo "Running acceptance tests for the terraform provider"
|
||||||
|
|
||||||
|
if [[ -z "$testfile" ]]; then
|
||||||
|
testfile="./..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z "$testfile" ]]; then
|
||||||
|
echo "ERROR: No test file provided"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
set +e
|
||||||
cd stackit || exit 1
|
cd stackit || exit 1
|
||||||
TF_ACC=1 \
|
TF_ACC=1 \
|
||||||
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
||||||
|
|
@@ -250,7 +242,21 @@ runs:
|
||||||
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
||||||
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
||||||
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
||||||
go test ./... -count=1 -timeout=${{ inputs.test_timeout_string }}
|
go test -v ${testfile} -timeout=${{ inputs.test_timeout_string }} | tee -a acc_test_run.log
|
||||||
|
set -e
|
||||||
|
|
||||||
|
have_fail=$(cat acc_test_run.log | grep FAIL)
|
||||||
|
if [[ -n $have_fail ]]; then
|
||||||
|
echo "::endgroup::"
|
||||||
|
echo "::group::go test result"
|
||||||
|
echo "Test failed, see acc_test_run.log for details"
|
||||||
|
echo "${have_fail}"
|
||||||
|
echo "result=<b>FAIL:</b> <br />${have_fail}" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "::endgroup::"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "result=no FAIL detected" >> "$GITHUB_OUTPUT"
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
env:
|
env:
|
||||||
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
|
|
@@ -260,3 +266,17 @@ runs:
|
||||||
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
|
||||||
|
- name: Set status output variable
|
||||||
|
if: always()
|
||||||
|
id: status
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "status=${{ steps.testrun.outcome == 'success' && 'SUCCESS' || 'FAILURE' }}" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Upload test log artifact
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: acc_test.log
|
||||||
|
path: "stackit/acc_test_run.log"
|
||||||
|
|
|
||||||
2
.github/actions/build/action.yaml
vendored
2
.github/actions/build/action.yaml
vendored
|
|
@@ -3,7 +3,7 @@ description: "Build pipeline"
|
||||||
inputs:
|
inputs:
|
||||||
go-version:
|
go-version:
|
||||||
description: "Go version to install"
|
description: "Go version to install"
|
||||||
default: '1.25'
|
default: '1.26'
|
||||||
required: true
|
required: true
|
||||||
java-distribution:
|
java-distribution:
|
||||||
description: "JAVA distribution to use (default: temurin)"
|
description: "JAVA distribution to use (default: temurin)"
|
||||||
|
|
|
||||||
1
.github/actions/clean_up/README.md
vendored
Normal file
1
.github/actions/clean_up/README.md
vendored
Normal file
|
|
@@ -0,0 +1 @@
|
||||||
|
# acceptance test action
|
||||||
227
.github/actions/clean_up/action.yaml
vendored
Normal file
227
.github/actions/clean_up/action.yaml
vendored
Normal file
|
|
@@ -0,0 +1,227 @@
|
||||||
|
name: CleanUp Project Resources
|
||||||
|
description: "Acceptance Testing CleanUp"
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
project_id:
|
||||||
|
description: "STACKIT project ID for tests"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
region:
|
||||||
|
description: "STACKIT region for tests"
|
||||||
|
default: 'eu01'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_resource_prefix:
|
||||||
|
description: "prefix in resource names"
|
||||||
|
default: 'tf-acc-'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
service_account_json_content:
|
||||||
|
description: "STACKIT service account JSON file contents"
|
||||||
|
required: true
|
||||||
|
default: ''
|
||||||
|
|
||||||
|
service_account_json_content_b64:
|
||||||
|
description: "STACKIT service account JSON file contents"
|
||||||
|
required: true
|
||||||
|
default: ''
|
||||||
|
|
||||||
|
list_only:
|
||||||
|
description: "only list resources, DO NOT delete"
|
||||||
|
required: true
|
||||||
|
default: 'true'
|
||||||
|
|
||||||
|
log_level:
|
||||||
|
description: "Log Level"
|
||||||
|
required: true
|
||||||
|
default: 'warning'
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
cli-version:
|
||||||
|
description: "stackit cli version"
|
||||||
|
value: ${{ steps.stackit_version.outputs.version }}
|
||||||
|
|
||||||
|
pre_count:
|
||||||
|
description: "number of resources found"
|
||||||
|
value: ${{ steps.retrieve_pre.outputs.count }}
|
||||||
|
|
||||||
|
post_count:
|
||||||
|
description: "number of resources found"
|
||||||
|
value: ${{ steps.retrieve_post.outputs.count }}
|
||||||
|
|
||||||
|
status:
|
||||||
|
description: "status of the test"
|
||||||
|
value: ${{ steps.status.outputs.status }}
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "::group::apt install"
|
||||||
|
set -e
|
||||||
|
apt-get -y -qq update >apt_update.log 2>apt_update_err.log
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_update.log apt_update_err.log
|
||||||
|
fi
|
||||||
|
apt-get -y -qq install curl gnupg jq >apt_get.log 2>apt_get_err.log
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_get.log apt_get_err.log
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
echo "::group::apt add source"
|
||||||
|
curl https://packages.stackit.cloud/keys/key.gpg | gpg --dearmor -o /usr/share/keyrings/stackit.gpg
|
||||||
|
echo "deb [signed-by=/usr/share/keyrings/stackit.gpg] https://packages.stackit.cloud/apt/cli stackit main" | tee -a /etc/apt/sources.list.d/stackit.list
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
echo "::group::apt install stackit cli"
|
||||||
|
apt-get -y -qq update >apt_update.log 2>apt_update_err.log
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_update.log apt_update_err.log
|
||||||
|
fi
|
||||||
|
apt-get -y -qq install stackit >apt_get.log 2>apt_get_err.log
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_get.log apt_get_err.log
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Check stackit cli version
|
||||||
|
id: stackit_version
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
VERSION=$(stackit --version | grep "Version:" | cut -d " " -f 2)
|
||||||
|
echo "stackit cli version: ${VERSION}"
|
||||||
|
echo "version=${VERSION}" >> $GITHUB_OUTPUT
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Creating service_account file from json input
|
||||||
|
if: inputs.service_account_json_content != ''
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "::group::create service account file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
echo "${{ inputs.service_account_json_content }}" > .svc_acc.json
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Creating service_account file from base64 json input
|
||||||
|
if: inputs.service_account_json_content_b64 != ''
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "::group::create service account file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
echo "${{ inputs.service_account_json_content_b64 }}" | base64 -d > .svc_acc.json
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Check service account file exists
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
if [[ ! -s .svc_acc.json ]]; then
|
||||||
|
echo "ERROR: service account file missing or empty"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Retrieve resources before
|
||||||
|
id: retrieve_pre
|
||||||
|
run: |
|
||||||
|
echo "::group::retrieve resources"
|
||||||
|
set -e
|
||||||
|
echo "authenticating api"
|
||||||
|
STACKIT_SERVICE_ACCOUNT_KEY_PATH="${PWD}/.svc_acc.json"
|
||||||
|
export STACKIT_SERVICE_ACCOUNT_KEY_PATH
|
||||||
|
stackit auth activate-service-account --service-account-key-path .svc_acc.json
|
||||||
|
|
||||||
|
echo "SQL Server Flex resources:"
|
||||||
|
sql_res=$(stackit --verbosity ${{ inputs.log_level }} --project-id "${{ inputs.project_id }}" beta sqlserverflex instance list --output-format json | jq -r '.[] | select(.name | startswith("${{ inputs.tf_resource_prefix }}"))')
|
||||||
|
sql_count=$(echo "$sql_res" | jq -r '.id' | wc -l)
|
||||||
|
|
||||||
|
echo "PostgreSQL Flex resources:"
|
||||||
|
pg_res=$(stackit --verbosity ${{ inputs.log_level }} --project-id "${{ inputs.project_id }}" postgresflex instance list --output-format json | jq -r '.[] | select(.name | startswith("${{ inputs.tf_resource_prefix }}"))')
|
||||||
|
pg_count=$(echo "$pg_res" | jq -r '.id' | wc -l)
|
||||||
|
|
||||||
|
echo "Number of resources found: ${sql_count} SQL Server Flex, ${pg_count} PostgreSQL Flex"
|
||||||
|
echo "count=$(( ${pg_count} + ${sql_count} ))" >> $GITHUB_OUTPUT
|
||||||
|
echo "::endgroup::"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Delete SQL Server Flex resources
|
||||||
|
if: ${{ inputs.list_only != 'true' }}
|
||||||
|
id: del_sql
|
||||||
|
run: |
|
||||||
|
echo "::group::delete SQL Server Flex resources"
|
||||||
|
stackit --verbosity ${{ inputs.log_level }} auth activate-service-account --service-account-key-path .svc_acc.json
|
||||||
|
for s in $(stackit --verbosity ${{ inputs.log_level }} --project-id ${{ inputs.project_id }} beta sqlserverflex instance list --output-format json | jq -r '.[] | select(.name | startswith("${{ inputs.tf_resource_prefix }}")) | .id');
|
||||||
|
do
|
||||||
|
stackit --verbosity ${{ inputs.log_level }} -y --project-id ${{ inputs.project_id }} beta sqlserverflex instance delete $s || echo "status=FAILURE" >> ${GITHUB_OUTPUT};
|
||||||
|
done
|
||||||
|
echo "::endgroup::"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Skip Delete SQL Server Flex resources
|
||||||
|
if: ${{ inputs.list_only == 'true' }}
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "Skip deleting: list only mode"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Delete PostgreSQL Flex resources
|
||||||
|
if: ${{ inputs.list_only != 'true' }}
|
||||||
|
id: del_pg
|
||||||
|
run: |
|
||||||
|
echo "::group::delete PostgreSQL Flex resources"
|
||||||
|
stackit auth activate-service-account --service-account-key-path .svc_acc.json
|
||||||
|
for s in $(stackit --verbosity ${{ inputs.log_level }} --project-id ${{ inputs.project_id }} postgresflex instance list --output-format json | jq -r '.[] | select(.name | startswith("${{ inputs.tf_resource_prefix }}")) | .id');
|
||||||
|
do
|
||||||
|
stackit --verbosity ${{ inputs.log_level }} -y --project-id ${{ inputs.project_id }} postgresflex instance delete "$s" --force || echo "status=FAILURE" >> ${GITHUB_OUTPUT};
|
||||||
|
done
|
||||||
|
echo "::endgroup::"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Skip Delete PostgreSQL Flex resources
|
||||||
|
if: ${{ inputs.list_only == 'true' }}
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "Skip deleting: list only mode"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Retrieve resources after
|
||||||
|
id: retrieve_post
|
||||||
|
run: |
|
||||||
|
echo "::group::retrieve resources"
|
||||||
|
set -e
|
||||||
|
echo "authenticating api"
|
||||||
|
STACKIT_SERVICE_ACCOUNT_KEY_PATH="${PWD}/.svc_acc.json"
|
||||||
|
export STACKIT_SERVICE_ACCOUNT_KEY_PATH
|
||||||
|
stackit auth activate-service-account --service-account-key-path .svc_acc.json
|
||||||
|
|
||||||
|
echo "SQL Server Flex resources:"
|
||||||
|
sql_res=$(stackit --verbosity ${{ inputs.log_level }} --project-id "${{ inputs.project_id }}" beta sqlserverflex instance list --output-format json | jq -r '.[] | select(.name | startswith("${{ inputs.tf_resource_prefix }}"))')
|
||||||
|
sql_count=$(echo "$sql_res" | jq -r '.id' | wc -l)
|
||||||
|
|
||||||
|
echo "PostgreSQL Flex resources:"
|
||||||
|
pg_res=$(stackit --verbosity ${{ inputs.log_level }} --project-id "${{ inputs.project_id }}" postgresflex instance list --output-format json | jq -r '.[] | select(.name | startswith("${{ inputs.tf_resource_prefix }}"))')
|
||||||
|
pg_count=$(echo "$pg_res" | jq -r '.id' | wc -l)
|
||||||
|
|
||||||
|
echo "Number of resources found: ${sql_count} SQL Server Flex, ${pg_count} PostgreSQL Flex"
|
||||||
|
echo "count=$(( ${pg_count} + ${sql_count} ))" >> $GITHUB_OUTPUT
|
||||||
|
echo "::endgroup::"
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
- name: Set status
|
||||||
|
if: always()
|
||||||
|
id: status
|
||||||
|
run: |
|
||||||
|
status="SUCCESS"
|
||||||
|
if [[ "${{ steps.del_pg.outputs.status }}" == "FAILURE" ]]; then
|
||||||
|
status="FAILURE"
|
||||||
|
elif [[ "${{ steps.del_sql.outputs.status }}" == "FAILURE" ]]; then
|
||||||
|
status="FAILURE"
|
||||||
|
fi
|
||||||
|
echo "status=$status" >> $GITHUB_OUTPUT
|
||||||
|
shell: bash
|
||||||
148
.github/actions/notify/action.yaml
vendored
Normal file
148
.github/actions/notify/action.yaml
vendored
Normal file
|
|
@@ -0,0 +1,148 @@
|
||||||
|
name: Send notification via Google Chat
|
||||||
|
description: "Sends a notification to a Google Chat room when a pull request is opened."
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
webhook_url:
|
||||||
|
description: "The URL of the Google Chat webhook."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
title:
|
||||||
|
description: "The title of the notification."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
subtitle:
|
||||||
|
description: "The subtitle of the notification."
|
||||||
|
default: 'no subtitle provided'
|
||||||
|
|
||||||
|
image_slug:
|
||||||
|
description: "The slug of the image to be included in the notification."
|
||||||
|
default: 'git'
|
||||||
|
|
||||||
|
event_author:
|
||||||
|
description: "The author of the event."
|
||||||
|
default: 'unknown'
|
||||||
|
|
||||||
|
event_title:
|
||||||
|
description: "The title of the event."
|
||||||
|
required: true
|
||||||
|
|
||||||
|
event_body:
|
||||||
|
description: "The body of the event."
|
||||||
|
default: 'no body provided'
|
||||||
|
|
||||||
|
event_number:
|
||||||
|
description: "The number of the event."
|
||||||
|
default: 'no number provided'
|
||||||
|
|
||||||
|
event_url:
|
||||||
|
description: "The url of the event."
|
||||||
|
default: 'none'
|
||||||
|
|
||||||
|
status:
|
||||||
|
description: "The status of the event."
|
||||||
|
default: 'UNKNOWN'
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- name: Install prerequisites
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "::group::apt install"
|
||||||
|
set -e
|
||||||
|
apt update
|
||||||
|
apt install -y curl jq
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Determine status color
|
||||||
|
id: status
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
case "${{ inputs.status }}" in
|
||||||
|
SUCCESS)
|
||||||
|
STATUS_COLOR="006400/228b22"
|
||||||
|
ADD='{"decoratedText": {"startIcon": {"materialIcon": {"name": "check_circle"}},"text": "<b style=\"color: green;\">SUCCESS</b>"}},'
|
||||||
|
;;
|
||||||
|
FAILURE)
|
||||||
|
STATUS_COLOR="8b0000/dc143c"
|
||||||
|
ADD='{"decoratedText": {"startIcon": {"materialIcon": {"name": "stop_circle"}},"text": "<b style=\"color: red;\">FAILURE</b>"}},'
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
STATUS_COLOR="483d8b/6495ed"
|
||||||
|
ADD=''
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
echo "color=${STATUS_COLOR}" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "status_add=${ADD}" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Notify via Google Chat Webhook
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
WEBHOOK: ${{ inputs.webhook_url }}
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
|
||||||
|
PAYLOAD=$(jq -n -r \
|
||||||
|
--arg header "${{ inputs.title }}" \
|
||||||
|
--arg subtitle "${{ inputs.subtitle }}" \
|
||||||
|
--arg imgurl "https://cdn.simpleicons.org/${{ inputs.image_slug }}/${{ steps.status.outputs.color }}" \
|
||||||
|
--arg title "${{ inputs.event_title || 'no event title given' }}" \
|
||||||
|
--arg body "${{ inputs.event_body || 'no event body given' }}" \
|
||||||
|
--arg author "${{ inputs.event_author || 'no event author given' }}" \
|
||||||
|
--arg url "${{ inputs.event_url || github.repositoryUrl || github.server_url }}" \
|
||||||
|
'{ "cardsV2": [ { "cardId": "notify-${{ github.run_id }}", "card": {
|
||||||
|
"header": {
|
||||||
|
"title": "\($header)",
|
||||||
|
"subtitle": "\($subtitle)",
|
||||||
|
"imageUrl": "\($imgurl)",
|
||||||
|
"imageType": "SQUARE"
|
||||||
|
},
|
||||||
|
"sections": [
|
||||||
|
{
|
||||||
|
"header": "\($title)",
|
||||||
|
"collapsible": false,
|
||||||
|
"widgets": [
|
||||||
|
${{ steps.status.outputs.status_add }}
|
||||||
|
{
|
||||||
|
"decoratedText": {
|
||||||
|
"startIcon": {
|
||||||
|
"knownIcon": "PERSON"
|
||||||
|
},
|
||||||
|
"text": "<b>\($author)</b>"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"textParagraph": {
|
||||||
|
"text": "\($body)",
|
||||||
|
"maxLines": 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"widgets": [
|
||||||
|
{
|
||||||
|
"buttonList": {
|
||||||
|
"buttons": [
|
||||||
|
{
|
||||||
|
"text": "View Source Event",
|
||||||
|
"type": "FILLED",
|
||||||
|
"onClick": {
|
||||||
|
"openLink": {
|
||||||
|
"url": "\($url)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}} ] }')
|
||||||
|
|
||||||
|
curl \
|
||||||
|
-X POST \
|
||||||
|
-H 'Content-Type: application/json' \
|
||||||
|
"${{ inputs.webhook_url }}&threadKey=run${{ github.run_id }}&messageReplyOption=REPLY_MESSAGE_FALLBACK_TO_NEW_THREAD" \
|
||||||
|
-d "${PAYLOAD}"
|
||||||
2
.github/actions/setup-cache-go/action.yaml
vendored
2
.github/actions/setup-cache-go/action.yaml
vendored
|
|
@@ -10,7 +10,7 @@ inputs:
|
||||||
|
|
||||||
go-version:
|
go-version:
|
||||||
description: "go version to install"
|
description: "go version to install"
|
||||||
default: '1.25'
|
default: '1.26'
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
|
|
|
||||||
64
.github/workflows/ci_new.yaml
vendored
64
.github/workflows/ci_new.yaml
vendored
|
|
@@ -2,6 +2,7 @@ name: CI Workflow
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
|
types: [ opened, synchronize, reopened ]
|
||||||
branches:
|
branches:
|
||||||
- alpha
|
- alpha
|
||||||
- main
|
- main
|
||||||
|
|
@@ -27,11 +28,24 @@ jobs:
|
||||||
config:
|
config:
|
||||||
if: ${{ github.event_name != 'schedule' }}
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
name: Check GoReleaser config
|
name: Check GoReleaser config
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
if: always()
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[START] CI pipeline (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.event_name }} on ${{ forgejo.ref_name }}"
|
||||||
|
event_title: "${{ forgejo.event_name }} for ${{ forgejo.repository }}"
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: "${{ forgejo.event_name }} on ${{ forgejo.ref }} for ${{ forgejo.repository }}"
|
||||||
|
event_number: ${{ forgejo.run_number }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
|
||||||
- name: Check GoReleaser
|
- name: Check GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v7
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
|
|
@@ -39,7 +53,7 @@ jobs:
|
||||||
|
|
||||||
prepare:
|
prepare:
|
||||||
name: Prepare GO cache
|
name: Prepare GO cache
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
permissions:
|
permissions:
|
||||||
actions: read # Required to identify workflow run.
|
actions: read # Required to identify workflow run.
|
||||||
checks: write # Required to add status summary.
|
checks: write # Required to add status summary.
|
||||||
|
|
@@ -101,7 +115,7 @@ jobs:
|
||||||
needs:
|
needs:
|
||||||
- config
|
- config
|
||||||
- prepare
|
- prepare
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
permissions:
|
permissions:
|
||||||
actions: read # Required to identify workflow run.
|
actions: read # Required to identify workflow run.
|
||||||
checks: write # Required to add status summary.
|
checks: write # Required to add status summary.
|
||||||
|
|
@@ -184,7 +198,7 @@ jobs:
|
||||||
|
|
||||||
testing:
|
testing:
|
||||||
name: CI run tests
|
name: CI run tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
needs:
|
needs:
|
||||||
- config
|
- config
|
||||||
- prepare
|
- prepare
|
||||||
|
|
@@ -218,11 +232,21 @@ jobs:
|
||||||
run: go mod tidy
|
run: go mod tidy
|
||||||
|
|
||||||
- name: Testing
|
- name: Testing
|
||||||
|
if: ${{ github.event_name != 'pull_request' }}
|
||||||
run: |
|
run: |
|
||||||
|
unset TF_ACC
|
||||||
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
||||||
export TF_ACC_SERVICE_ACCOUNT_FILE
|
export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
make test
|
make test
|
||||||
|
|
||||||
|
- name: Testing with coverage
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: |
|
||||||
|
unset TF_ACC
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
||||||
|
export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
|
make coverage
|
||||||
|
|
||||||
# - name: Acceptance Testing
|
# - name: Acceptance Testing
|
||||||
# env:
|
# env:
|
||||||
# TF_ACC: "1"
|
# TF_ACC: "1"
|
||||||
|
|
@ -232,20 +256,20 @@ jobs:
|
||||||
# export TF_ACC_SERVICE_ACCOUNT_FILE
|
# export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
# make test-acceptance-tf
|
# make test-acceptance-tf
|
||||||
|
|
||||||
- name: Run Test
|
# - name: Run Acceptance Test
|
||||||
if: ${{ github.event_name == 'pull_request' }}
|
# if: ${{ github.event_name == 'pull_request' }}
|
||||||
uses: ./.github/actions/acc_test
|
# uses: ./.github/actions/acc_test
|
||||||
with:
|
# with:
|
||||||
go-version: ${{ env.GO_VERSION }}
|
# go-version: ${{ env.GO_VERSION }}
|
||||||
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
# project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
region: ${{ vars.TF_ACC_REGION }}
|
# region: ${{ vars.TF_ACC_REGION }}
|
||||||
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
# service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
||||||
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
|
# project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
|
||||||
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
|
# tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
|
||||||
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
# tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
||||||
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
# tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
||||||
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
# tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
||||||
# service_account_json_file_path: "~/service_account.json"
|
# # service_account_json_file_path: "~/service_account.json"
|
||||||
|
|
||||||
- name: Check coverage threshold
|
- name: Check coverage threshold
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
@ -267,7 +291,7 @@ jobs:
|
||||||
main:
|
main:
|
||||||
if: ${{ github.event_name != 'schedule' }}
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
name: CI run build and linting
|
name: CI run build and linting
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
needs:
|
needs:
|
||||||
- config
|
- config
|
||||||
- prepare
|
- prepare
|
||||||
|
|
@ -318,7 +342,7 @@ jobs:
|
||||||
code_coverage:
|
code_coverage:
|
||||||
name: "Code coverage report"
|
name: "Code coverage report"
|
||||||
if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
|
if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
needs:
|
needs:
|
||||||
- main
|
- main
|
||||||
- prepare
|
- prepare
|
||||||
|
|
|
||||||
72
.github/workflows/clean_up.yaml
vendored
Normal file
72
.github/workflows/clean_up.yaml
vendored
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|
name: TF Acceptance Test CleanUp
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
list_only:
|
||||||
|
description: "only list resources"
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
required: true
|
||||||
|
|
||||||
|
res_prefix:
|
||||||
|
description: "resource name prefix"
|
||||||
|
type: string
|
||||||
|
default: 'tf-acc-'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
log_level:
|
||||||
|
description: 'Log Level'
|
||||||
|
required: true
|
||||||
|
default: 'warning'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- info
|
||||||
|
- warning
|
||||||
|
- debug
|
||||||
|
- error
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
clean:
|
||||||
|
name: Clean up
|
||||||
|
runs-on: stackit-docker
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[START] CLEAN UP pipeline (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.repository }}"
|
||||||
|
event_title: ${{ forgejo.event_name }}
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: "try to remove all resources with prefix <b>${{ inputs.res_prefix }}</b>"
|
||||||
|
event_number: ${{ forgejo.run_number }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
|
||||||
|
- name: Clean
|
||||||
|
id: clean
|
||||||
|
uses: ./.github/actions/clean_up
|
||||||
|
with:
|
||||||
|
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
|
region: 'eu01'
|
||||||
|
tf_resource_prefix: ${{ inputs.res_prefix }}
|
||||||
|
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
||||||
|
list_only: ${{ inputs.list_only }}
|
||||||
|
log_level: ${{ inputs.log_level }}
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
if: always()
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[END] CLEAN UP pipeline (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.repository }}"
|
||||||
|
event_title: ${{ forgejo.event_name }}
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: "count before cleaning: ${{ steps.clean.outputs.pre_count }} <br /> count after cleaning: ${{ steps.clean.outputs.post_count }}"
|
||||||
|
event_number: ${{ forgejo.run_number }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
status: ${{ steps.clean.outcome == 'success' && 'SUCCESS' || 'FAILURE' }}
|
||||||
25
.github/workflows/notify_pr.yaml
vendored
Normal file
25
.github/workflows/notify_pr.yaml
vendored
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
name: Notify on new PR
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
notify:
|
||||||
|
name: Notify via Google Chat
|
||||||
|
runs-on: stackit-docker
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "New Pull Request"
|
||||||
|
event_title: "${{ github.event.pull_request.title }}"
|
||||||
|
event_author: "${{ github.event.pull_request.user.login }}"
|
||||||
|
event_body: "${{ github.event.pull_request.body || 'No description provided.' }}"
|
||||||
|
event_number: "${{ github.event.pull_request.number }}"
|
||||||
|
event_url: "${{ github.event.pull_request.html_url }}"
|
||||||
63
.github/workflows/publish.yaml
vendored
63
.github/workflows/publish.yaml
vendored
|
|
@ -4,9 +4,10 @@ run-name: Publish by @${{ github.actor }}
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
- 'v0.*'
|
- 'v*'
|
||||||
|
|
||||||
env:
|
env:
|
||||||
GO_VERSION: "1.25"
|
GO_VERSION: "1.25"
|
||||||
|
|
@ -16,11 +17,12 @@ env:
|
||||||
jobs:
|
jobs:
|
||||||
config:
|
config:
|
||||||
name: Check GoReleaser config
|
name: Check GoReleaser config
|
||||||
if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-tags: true
|
||||||
|
|
||||||
- name: Check GoReleaser
|
- name: Check GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v7
|
uses: goreleaser/goreleaser-action@v7
|
||||||
|
|
@ -29,13 +31,12 @@ jobs:
|
||||||
|
|
||||||
publish:
|
publish:
|
||||||
name: "Publish provider"
|
name: "Publish provider"
|
||||||
if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
|
|
||||||
needs: config
|
needs: config
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
actions: read # Required to identify workflow run.
|
actions: read # Required to identify workflow run.
|
||||||
checks: write # Required to add status summary.
|
checks: write # Required to add status summary.
|
||||||
contents: read # Required to checkout repository.
|
contents: write # Required to checkout repository.
|
||||||
pull-requests: write # Required to add PR comment.
|
pull-requests: write # Required to add PR comment.
|
||||||
steps:
|
steps:
|
||||||
- name: Install needed tools
|
- name: Install needed tools
|
||||||
|
|
@ -45,6 +46,20 @@ jobs:
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-tags: true
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[START] Publish (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.event_name }} on branch ${{ forgejo.ref }}"
|
||||||
|
event_title: "run started"
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: ""
|
||||||
|
event_number: ${{ forgejo.event.id }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v6
|
uses: actions/setup-go@v6
|
||||||
|
|
@ -82,7 +97,7 @@ jobs:
|
||||||
gpg --import ~/private.key.pem
|
gpg --import ~/private.key.pem
|
||||||
rm ~/private.key.pem
|
rm ~/private.key.pem
|
||||||
|
|
||||||
- name: Run GoReleaser with SNAPSHOT
|
- name: Run GoReleaser
|
||||||
if: github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'workflow_dispatch'
|
||||||
id: goreleaser
|
id: goreleaser
|
||||||
env:
|
env:
|
||||||
|
|
@ -90,7 +105,8 @@ jobs:
|
||||||
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
uses: goreleaser/goreleaser-action@v7
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --skip publish --clean --snapshot
|
# args: release --skip publish --clean --snapshot
|
||||||
|
args: release --skip publish --clean
|
||||||
|
|
||||||
- name: Run GoReleaser
|
- name: Run GoReleaser
|
||||||
if: github.event_name != 'workflow_dispatch'
|
if: github.event_name != 'workflow_dispatch'
|
||||||
|
|
@ -106,9 +122,15 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
|
echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
|
||||||
|
|
||||||
|
- name: Determine version
|
||||||
|
id: get_version
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
VERSION=$(jq -r .version < dist/metadata.json)
|
||||||
|
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
- name: Prepare provider directory structure
|
- name: Prepare provider directory structure
|
||||||
run: |
|
run: |
|
||||||
VERSION=$(jq -r .version < dist/metadata.json)
|
|
||||||
go run generator/main.go \
|
go run generator/main.go \
|
||||||
publish \
|
publish \
|
||||||
--namespace=mhenselin \
|
--namespace=mhenselin \
|
||||||
|
|
@ -117,9 +139,16 @@ jobs:
|
||||||
--domain=tfregistry.sysops.stackit.rocks \
|
--domain=tfregistry.sysops.stackit.rocks \
|
||||||
--gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
|
--gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
|
||||||
--gpgPubKeyFile=public_key.pem \
|
--gpgPubKeyFile=public_key.pem \
|
||||||
--version=${VERSION}
|
--version=${{ steps.get_version.outputs.version }}
|
||||||
|
|
||||||
|
- name: Prepare documentation nav file
|
||||||
|
run: |
|
||||||
|
go run generator/main.go \
|
||||||
|
docs \
|
||||||
|
--outFile nav.md
|
||||||
|
|
||||||
- name: Publish provider to S3
|
- name: Publish provider to S3
|
||||||
|
id: publish_to_s3
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
cd release/
|
cd release/
|
||||||
|
|
@ -136,5 +165,21 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
ssh -o StrictHostKeyChecking=no ubuntu@${{ vars.DOCS_SERVER_IP }} 'rm -rf /srv/www/docs'
|
ssh -o StrictHostKeyChecking=no ubuntu@${{ vars.DOCS_SERVER_IP }} 'rm -rf /srv/www/docs'
|
||||||
echo "${{ github.ref_name }}" >docs/_version.txt
|
echo "${{ steps.get_version.outputs.version }}" >docs/_version.txt
|
||||||
|
# echo "${{ github.ref_name }}" >docs/_version.txt
|
||||||
scp -o StrictHostKeyChecking=no -r docs ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
|
scp -o StrictHostKeyChecking=no -r docs ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
|
||||||
|
scp -o StrictHostKeyChecking=no nav.md ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
if: always()
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[END] Publish (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.event_name }} on branch ${{ forgejo.ref }}"
|
||||||
|
event_title: "released: ${{ steps.get_version.outputs.version }}"
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: ""
|
||||||
|
event_number: ${{ forgejo.event.id }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
status: "${{ steps.publish_to_s3.outcome == 'success' && 'SUCCESS' || 'FAILURE' }}"
|
||||||
|
|
|
||||||
4
.github/workflows/release.yaml
vendored
4
.github/workflows/release.yaml
vendored
|
|
@ -16,14 +16,14 @@ permissions:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
goreleaser:
|
goreleaser:
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v6
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
# Allow goreleaser to access older tag information.
|
# Allow goreleaser to access older tag information.
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- uses: https://code.forgejo.org/actions/setup-go@v6
|
- uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
cache: true
|
cache: true
|
||||||
|
|
|
||||||
8
.github/workflows/renovate.yaml
vendored
8
.github/workflows/renovate.yaml
vendored
|
|
@ -8,12 +8,14 @@ on:
|
||||||
jobs:
|
jobs:
|
||||||
renovate:
|
renovate:
|
||||||
name: Renovate
|
name: Renovate
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Self-hosted Renovate
|
- name: Self-hosted Renovate
|
||||||
uses: renovatebot/github-action@v41.0.0
|
uses: renovatebot/github-action@v46.1.5
|
||||||
with:
|
with:
|
||||||
configurationFile: .github/renovate.json
|
configurationFile: .github/renovate.json
|
||||||
token: ${{ secrets.RENOVATE_TOKEN }}
|
# token: ${{ secrets.RENOVATE_TOKEN }}
|
||||||
|
token: ${{ env.FORGEJO_TOKEN }}
|
||||||
|
|
|
||||||
2
.github/workflows/stale.yaml
vendored
2
.github/workflows/stale.yaml
vendored
|
|
@ -20,7 +20,7 @@ permissions:
|
||||||
jobs:
|
jobs:
|
||||||
stale:
|
stale:
|
||||||
name: "Stale"
|
name: "Stale"
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
steps:
|
steps:
|
||||||
- name: "Mark old PRs as stale"
|
- name: "Mark old PRs as stale"
|
||||||
|
|
|
||||||
104
.github/workflows/tf-acc-test.yaml
vendored
104
.github/workflows/tf-acc-test.yaml
vendored
|
|
@ -1,20 +1,71 @@
|
||||||
name: TF Acceptance Tests Workflow
|
name: TF Acceptance Tests Workflow
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, synchronize, reopened]
|
||||||
|
branches:
|
||||||
|
- alpha
|
||||||
|
- main
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
enable_debug:
|
||||||
|
description: "enable terraform debug logs"
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
required: true
|
||||||
|
|
||||||
|
test_timeout_string:
|
||||||
|
description: "string that determines the timeout (default: '120m')"
|
||||||
|
type: string
|
||||||
|
default: '120m'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
test_file:
|
||||||
|
description: "string that determines the test file to run (default all tests)"
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/postgresflexalpha
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta
|
||||||
|
default: ''
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
acc_test:
|
acc_test:
|
||||||
name: Acceptance Tests
|
name: Acceptance Tests
|
||||||
runs-on: ubuntu-latest
|
runs-on: stackit-docker
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Run Test
|
- name: set start time
|
||||||
|
id: start_time
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
time=$(date --rfc-3339=ns)
|
||||||
|
echo "start_time=$time" >> ${GITHUB_OUTPUT}
|
||||||
|
start=$(date +%s%N)
|
||||||
|
echo "start=$start" >> ${GITHUB_OUTPUT}
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[START] Terraform Acceptance Tests (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.event_name }} on branch ${{ forgejo.ref }}"
|
||||||
|
event_title: "started: ${{ steps.start_time.outputs.start_time }}"
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: ${{ inputs.test_file }}
|
||||||
|
event_number: ${{ forgejo.run_number }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
|
||||||
|
- name: Run Test (workflow dispatch)
|
||||||
|
if: ${{ forgejo.event_name == 'workflow_dispatch' }}
|
||||||
|
id: manual_run
|
||||||
|
continue-on-error: true
|
||||||
uses: ./.github/actions/acc_test
|
uses: ./.github/actions/acc_test
|
||||||
with:
|
with:
|
||||||
go-version: ${{ env.GO_VERSION }}
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
|
@ -26,4 +77,51 @@ jobs:
|
||||||
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
||||||
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
||||||
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
||||||
# service_account_json_file_path: "~/service_account.json"
|
tf_debug: ${{ inputs.enable_debug }}
|
||||||
|
test_timeout_string: ${{ inputs.test_timeout_string }}
|
||||||
|
test_file: ${{ inputs.test_file }}
|
||||||
|
|
||||||
|
- name: Run Test (automatic)
|
||||||
|
if: ${{ forgejo.event_name != 'workflow_dispatch' }}
|
||||||
|
id: automatic_run
|
||||||
|
continue-on-error: true
|
||||||
|
uses: ./.github/actions/acc_test
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
|
region: 'eu01'
|
||||||
|
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
||||||
|
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
|
||||||
|
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
|
||||||
|
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
||||||
|
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
||||||
|
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
||||||
|
|
||||||
|
- name: set end time
|
||||||
|
id: end_time
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "auto status: ${{ steps.automatic_run.outputs.status }}"
|
||||||
|
echo "manual status: ${{ steps.manual_run.outputs.status }}"
|
||||||
|
echo "status: ${{ forgejo.event_name == 'workflow_dispatch' && steps.manual_run.outputs.status || steps.automatic_run.outputs.status }}"
|
||||||
|
echo "end_time=$(date --rfc-3339=ns)" >> ${GITHUB_OUTPUT}
|
||||||
|
end=$(date +%s%N)
|
||||||
|
echo "end=${end}" >> ${GITHUB_OUTPUT}
|
||||||
|
start=${{ steps.start_time.outputs.start }}
|
||||||
|
diff=$((end-start))
|
||||||
|
duration=$(printf "%s.%s" "${diff:0: -9}" "${diff: -9:3}")
|
||||||
|
echo "duration=${duration}" >> ${GITHUB_OUTPUT}
|
||||||
|
|
||||||
|
- name: Notify
|
||||||
|
uses: ./.github/actions/notify
|
||||||
|
with:
|
||||||
|
webhook_url: ${{ secrets.GOOGLE_WEBHOOK_URL }}
|
||||||
|
title: "[END] Terraform Acceptance Tests (#${{ forgejo.run_number }})"
|
||||||
|
subtitle: "${{ forgejo.event_name }} on branch ${{ forgejo.ref }} with status: ${{ forgejo.event_name == 'workflow_dispatch' && steps.manual_run.outputs.status || steps.automatic_run.outputs.status }}"
|
||||||
|
event_title: "run ended: ${{ steps.end_time.outputs.end_time }}, duration: ${{ steps.end_time.outputs.duration }} seconds"
|
||||||
|
event_author: ${{ forgejo.actor }}
|
||||||
|
event_body: "${{ forgejo.event_name == 'workflow_dispatch' && steps.manual_run.outputs.result || steps.automatic_run.outputs.result }}"
|
||||||
|
event_number: ${{ forgejo.event.id }}
|
||||||
|
event_url: "https://tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/actions/runs/${{ forgejo.run_number }}"
|
||||||
|
status: "${{ forgejo.event_name == 'workflow_dispatch' && steps.manual_run.outputs.status || steps.automatic_run.outputs.status }}"
|
||||||
|
|
|
||||||
|
|
@ -29,12 +29,8 @@ linters:
|
||||||
depguard:
|
depguard:
|
||||||
rules:
|
rules:
|
||||||
main:
|
main:
|
||||||
list-mode: lax
|
list-mode: original
|
||||||
allow:
|
allow: []
|
||||||
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
|
||||||
- github.com/hashicorp/terraform-plugin-framework
|
|
||||||
- github.com/hashicorp/terraform-plugin-log
|
|
||||||
- github.com/stackitcloud/stackit-sdk-go
|
|
||||||
deny:
|
deny:
|
||||||
- pkg: github.com/stretchr/testify
|
- pkg: github.com/stretchr/testify
|
||||||
desc: Do not use a testing framework
|
desc: Do not use a testing framework
|
||||||
|
|
@ -76,6 +72,7 @@ linters:
|
||||||
exclusions:
|
exclusions:
|
||||||
paths:
|
paths:
|
||||||
- generator/
|
- generator/
|
||||||
|
- internal/testutils
|
||||||
generated: lax
|
generated: lax
|
||||||
warn-unused: true
|
warn-unused: true
|
||||||
# Excluding configuration per-path, per-linter, per-text and per-source.
|
# Excluding configuration per-path, per-linter, per-text and per-source.
|
||||||
|
|
@ -86,7 +83,7 @@ linters:
|
||||||
- gochecknoinits
|
- gochecknoinits
|
||||||
formatters:
|
formatters:
|
||||||
enable:
|
enable:
|
||||||
#- gofmt
|
- gofmt
|
||||||
- goimports
|
- goimports
|
||||||
settings:
|
settings:
|
||||||
goimports:
|
goimports:
|
||||||
|
|
|
||||||
116
README.md
116
README.md
|
|
@ -19,7 +19,7 @@ terraform {
|
||||||
required_providers {
|
required_providers {
|
||||||
stackitprivatepreview = {
|
stackitprivatepreview = {
|
||||||
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
version = "= 0.0.5-alpha"
|
version = ">= 0.1.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -38,7 +38,6 @@ Check one of the examples in the [examples](examples/) folder.
|
||||||
To authenticate, you will need a [service account](https://docs.stackit.cloud/platform/access-and-identity/service-accounts/). Create it in the [STACKIT Portal](https://portal.stackit.cloud/) and assign the necessary permissions to it, e.g. `project.owner`. There are multiple ways to authenticate:
|
To authenticate, you will need a [service account](https://docs.stackit.cloud/platform/access-and-identity/service-accounts/). Create it in the [STACKIT Portal](https://portal.stackit.cloud/) and assign the necessary permissions to it, e.g. `project.owner`. There are multiple ways to authenticate:
|
||||||
|
|
||||||
- Key flow (recommended)
|
- Key flow (recommended)
|
||||||
- Token flow (is scheduled for deprecation and will be removed on December 17, 2025.)
|
|
||||||
|
|
||||||
When setting up authentication, the provider will always try to use the key flow first and search for credentials in several locations, following a specific order:
|
When setting up authentication, the provider will always try to use the key flow first and search for credentials in several locations, following a specific order:
|
||||||
|
|
||||||
|
|
@ -52,7 +51,6 @@ When setting up authentication, the provider will always try to use the key flow
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"STACKIT_SERVICE_ACCOUNT_TOKEN": "foo_token",
|
|
||||||
"STACKIT_SERVICE_ACCOUNT_KEY_PATH": "path/to/sa_key.json"
|
"STACKIT_SERVICE_ACCOUNT_KEY_PATH": "path/to/sa_key.json"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
@ -71,35 +69,41 @@ To configure the key flow, follow this steps:
|
||||||
|
|
||||||
1. Create a service account key:
|
1. Create a service account key:
|
||||||
|
|
||||||
- Use the [STACKIT Portal](https://portal.stackit.cloud/): go to the `Service Accounts` tab, choose a `Service Account` and go to `Service Account Keys` to create a key. For more details, see [Create a service account key](https://docs.stackit.cloud/platform/access-and-identity/service-accounts/how-tos/manage-service-account-keys/)
|
- Use the [STACKIT Portal](https://portal.stackit.cloud/): go to the `Service Accounts` tab, choose a `Service Account` and go to `Service Account Keys` to create a key. For more details, see [Create a service account key](https://docs.stackit.cloud/platform/access-and-identity/service-accounts/how-tos/manage-service-account-keys/)
|
||||||
|
|
||||||
2. Save the content of the service account key by copying it and saving it in a JSON file.
|
2. Save the content of the service account key by copying it and saving it in a JSON file.
|
||||||
|
|
||||||
The expected format of the service account key is a **JSON** with the following structure:
|
The expected format of the service account key is a **JSON** with the following structure:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"id": "uuid",
|
"id": "uuid",
|
||||||
"publicKey": "public key",
|
"publicKey": "public key",
|
||||||
"createdAt": "2023-08-24T14:15:22Z",
|
"createdAt": "2023-08-24T14:15:22Z",
|
||||||
"validUntil": "2023-08-24T14:15:22Z",
|
"validUntil": "2023-08-24T14:15:22Z",
|
||||||
"keyType": "USER_MANAGED",
|
"keyType": "USER_MANAGED",
|
||||||
"keyOrigin": "USER_PROVIDED",
|
"keyOrigin": "USER_PROVIDED",
|
||||||
"keyAlgorithm": "RSA_2048",
|
"keyAlgorithm": "RSA_2048",
|
||||||
"active": true,
|
"active": true,
|
||||||
"credentials": {
|
"credentials": {
|
||||||
"kid": "string",
|
"kid": "string",
|
||||||
"iss": "my-sa@sa.stackit.cloud",
|
"iss": "my-sa@sa.stackit.cloud",
|
||||||
"sub": "uuid",
|
"sub": "uuid",
|
||||||
"aud": "string",
|
"aud": "string",
|
||||||
(optional) "privateKey": "private key when generated by the SA service"
|
(optional) "privateKey": "private key when generated by the SA service"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Configure the service account key for authentication in the provider by following one of the alternatives below:
|
3. Configure the service account key for authentication in the provider by following one of the alternatives below:
|
||||||
|
|
||||||
- setting the fields in the provider block: `service_account_key` or `service_account_key_path`
|
- setting the fields in the provider block: `service_account_key` or `service_account_key_path`
|
||||||
|
```hcl
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
|
```
|
||||||
- setting the environment variable: `STACKIT_SERVICE_ACCOUNT_KEY_PATH` or `STACKIT_SERVICE_ACCOUNT_KEY`
|
- setting the environment variable: `STACKIT_SERVICE_ACCOUNT_KEY_PATH` or `STACKIT_SERVICE_ACCOUNT_KEY`
|
||||||
- ensure the set the service account key in `STACKIT_SERVICE_ACCOUNT_KEY` is correctly formatted. Use e.g.
|
- ensure the set the service account key in `STACKIT_SERVICE_ACCOUNT_KEY` is correctly formatted. Use e.g.
|
||||||
`$ export STACKIT_SERVICE_ACCOUNT_KEY=$(cat ./service-account-key.json)`
|
`$ export STACKIT_SERVICE_ACCOUNT_KEY=$(cat ./service-account-key.json)`
|
||||||
|
|
@ -111,16 +115,6 @@ To configure the key flow, follow this steps:
|
||||||
> - setting the environment variable: `STACKIT_PRIVATE_KEY_PATH` or `STACKIT_PRIVATE_KEY`
|
> - setting the environment variable: `STACKIT_PRIVATE_KEY_PATH` or `STACKIT_PRIVATE_KEY`
|
||||||
> - setting `STACKIT_PRIVATE_KEY_PATH` in the credentials file (see above)
|
> - setting `STACKIT_PRIVATE_KEY_PATH` in the credentials file (see above)
|
||||||
|
|
||||||
### Token flow
|
|
||||||
|
|
||||||
> Is scheduled for deprecation and will be removed on December 17, 2025.
|
|
||||||
|
|
||||||
Using this flow is less secure since the token is long-lived. You can provide the token in several ways:
|
|
||||||
|
|
||||||
1. Setting the field `service_account_token` in the provider
|
|
||||||
2. Setting the environment variable `STACKIT_SERVICE_ACCOUNT_TOKEN`
|
|
||||||
3. Setting it in the credentials file (see above)
|
|
||||||
|
|
||||||
## Backend configuration
|
## Backend configuration
|
||||||
|
|
||||||
To keep track of your terraform state, you can configure an [S3 backend](https://developer.hashicorp.com/terraform/language/settings/backends/s3) using [STACKIT Object Storage](https://docs.stackit.cloud/products/storage/object-storage).
|
To keep track of your terraform state, you can configure an [S3 backend](https://developer.hashicorp.com/terraform/language/settings/backends/s3) using [STACKIT Object Storage](https://docs.stackit.cloud/products/storage/object-storage).
|
||||||
|
|
@ -150,62 +144,6 @@ terraform {
|
||||||
|
|
||||||
Note: AWS specific checks must be skipped as they do not work on STACKIT. For details on what those validations do, see [here](https://developer.hashicorp.com/terraform/language/settings/backends/s3#configuration).
|
Note: AWS specific checks must be skipped as they do not work on STACKIT. For details on what those validations do, see [here](https://developer.hashicorp.com/terraform/language/settings/backends/s3#configuration).
|
||||||
|
|
||||||
## Opting into Beta Resources
|
|
||||||
|
|
||||||
To use beta resources in the STACKIT Terraform provider, follow these steps:
|
|
||||||
|
|
||||||
1. **Provider Configuration Option**
|
|
||||||
|
|
||||||
Set the `enable_beta_resources` option in the provider configuration. This is a boolean attribute that can be either `true` or `false`.
|
|
||||||
|
|
||||||
```hcl
|
|
||||||
provider "stackit" {
|
|
||||||
default_region = "eu01"
|
|
||||||
enable_beta_resources = true
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Environment Variable**
|
|
||||||
|
|
||||||
Set the `STACKIT_TF_ENABLE_BETA_RESOURCES` environment variable to `"true"` or `"false"`. Other values will be ignored and will produce a warning.
|
|
||||||
|
|
||||||
```sh
|
|
||||||
export STACKIT_TF_ENABLE_BETA_RESOURCES=true
|
|
||||||
```
|
|
||||||
|
|
||||||
> **Note**: The environment variable takes precedence over the provider configuration option. This means that if the `STACKIT_TF_ENABLE_BETA_RESOURCES` environment variable is set to a valid value (`"true"` or `"false"`), it will override the `enable_beta_resources` option specified in the provider configuration.
|
|
||||||
|
|
||||||
For more details, please refer to the [beta resources configuration guide](https://registry.terraform.io/providers/stackitcloud/stackit/latest/docs/guides/opting_into_beta_resources).
|
|
||||||
|
|
||||||
## Opting into Experiments
|
|
||||||
|
|
||||||
Experiments are features that are even less mature and stable than Beta Resources. While there is some assumed stability in beta resources, you will have to expect breaking changes while using experimental resources. Experimental Resources do not come with any support or warranty.
|
|
||||||
|
|
||||||
To enable experiments set the experiments field in the provider definition:
|
|
||||||
|
|
||||||
```hcl
|
|
||||||
provider "stackit" {
|
|
||||||
default_region = "eu01"
|
|
||||||
experiments = ["iam", "routing-tables", "network"]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Available Experiments
|
|
||||||
|
|
||||||
#### `iam`
|
|
||||||
|
|
||||||
Enables IAM management features in the Terraform provider. The underlying IAM API is expected to undergo a redesign in the future, which leads to it being considered experimental.
|
|
||||||
|
|
||||||
#### `routing-tables`
|
|
||||||
|
|
||||||
This feature enables experimental routing table capabilities in the Terraform Provider, available only to designated SNAs at this time.
|
|
||||||
|
|
||||||
#### `network`
|
|
||||||
|
|
||||||
The `stackit_network` provides the fields `region` and `routing_table_id` when the experiment flag `network` is set.
|
|
||||||
The underlying API is not stable yet and could change in the future.
|
|
||||||
If you don't need these fields, don't set the experiment flag `network`, so that the stable API is used.
|
|
||||||
|
|
||||||
## Acceptance Tests
|
## Acceptance Tests
|
||||||
|
|
||||||
> [!WARNING]
|
> [!WARNING]
|
||||||
|
|
|
||||||
|
|
@ -28,6 +28,9 @@ data "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
- `database_id` (Number) The ID of the database.
|
- `database_id` (Number) The ID of the database.
|
||||||
- `instance_id` (String) The ID of the instance.
|
- `instance_id` (String) The ID of the instance.
|
||||||
- `project_id` (String) The STACKIT project ID.
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
- `region` (String) The region which should be addressed
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
|
||||||
|
|
@ -26,6 +26,9 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
- `instance_id` (String) The ID of the instance.
|
||||||
- `project_id` (String) The STACKIT project ID.
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
- `region` (String) The region which should be addressed
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
@ -37,11 +40,18 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
|
|
||||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `id` (String) internal ID
|
||||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `labels` (Map of String) Key-value pairs, 63 characters max, begin and end with an alphanumerical character,
|
||||||
|
may contain dashes (-), underscores (_), dots (.), and alphanumerics between. Key MUST be at least 1 character.
|
||||||
|
Max 64 labels
|
||||||
|
Regex for keys: ^(?=.{1,63}$)([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]$
|
||||||
|
Regex for values: ^(?=.{0,63}$)(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])*$
|
||||||
|
The stackit- prefix is reserved and cannot be used for Keys.
|
||||||
- `name` (String) The name of the instance.
|
- `name` (String) The name of the instance.
|
||||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
- `replicas` (Number) How many replicas the instance should have.
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 90 days.
|
||||||
- `status` (String) The current status of the instance.
|
- `status` (String) The current status of the instance.
|
||||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
- `tf_original_api_id` (String) The ID of the instance.
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
|
|
||||||
|
|
@ -27,12 +27,12 @@ data "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
- `instance_id` (String) The ID of the instance.
|
||||||
- `project_id` (String) The STACKIT project ID.
|
- `project_id` (String) The STACKIT project ID.
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
- `user_id` (Number) The ID of the user.
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
### Optional
|
### Optional
|
||||||
|
|
||||||
- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
|
- `id` (String) Terraform's internal resource ID. It is structured as "`project_id`,`region`,`instance_id`,`user_id`".
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,32 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_database (Data Source)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `database_name` (String) The name of the database.
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
|
||||||
- `project_id` (String) The STACKIT project ID.
|
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
|
||||||
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
|
||||||
- `id` (String) The terraform internal identifier.
|
|
||||||
- `name` (String) The name of the database.
|
|
||||||
- `owner` (String) The owner of the database.
|
|
||||||
- `tf_original_api_id` (Number) The id of the database.
|
|
||||||
|
|
@ -1,77 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
|
||||||
- `project_id` (String) The STACKIT project ID.
|
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
|
||||||
- `edition` (String) Edition of the MSSQL server instance
|
|
||||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
|
||||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
|
||||||
- `name` (String) The name of the instance.
|
|
||||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
|
||||||
- `replicas` (Number) How many replicas the instance should have.
|
|
||||||
- `retention_days` (Number) The number of days for how long the backup files should be stored before being cleaned up. 30 to 365
|
|
||||||
- `status` (String)
|
|
||||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
|
||||||
- `tf_original_api_id` (String) The ID of the instance.
|
|
||||||
- `version` (String) The sqlserver version used for the instance.
|
|
||||||
|
|
||||||
<a id="nestedatt--encryption"></a>
|
|
||||||
### Nested Schema for `encryption`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `kek_key_id` (String) The key identifier
|
|
||||||
- `kek_key_ring_id` (String) The keyring identifier
|
|
||||||
- `kek_key_version` (String) The key version
|
|
||||||
- `service_account` (String)
|
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--network"></a>
|
|
||||||
### Nested Schema for `network`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `access_scope` (String) The network access scope of the instance
|
|
||||||
|
|
||||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
|
||||||
- `acl` (List of String) List of IPV4 cidr.
|
|
||||||
- `instance_address` (String)
|
|
||||||
- `router_address` (String)
|
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--storage"></a>
|
|
||||||
### Nested Schema for `storage`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `class` (String) The storage class for the storage.
|
|
||||||
- `size` (Number) The storage size in Gigabytes.
|
|
||||||
|
|
@ -1,62 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
|
||||||
- `project_id` (String) The STACKIT project ID.
|
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
|
|
||||||
### Optional
|
|
||||||
|
|
||||||
- `page` (Number) Number of the page of items list to be returned.
|
|
||||||
- `size` (Number) Number of items to be returned on each page.
|
|
||||||
- `sort` (String) Sorting of the users to be returned on each page.
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
|
||||||
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
|
||||||
|
|
||||||
<a id="nestedatt--pagination"></a>
|
|
||||||
### Nested Schema for `pagination`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `page` (Number)
|
|
||||||
- `size` (Number)
|
|
||||||
- `sort` (String)
|
|
||||||
- `total_pages` (Number)
|
|
||||||
- `total_rows` (Number)
|
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--users"></a>
|
|
||||||
### Nested Schema for `users`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `status` (String) The current status of the user.
|
|
||||||
- `tf_original_api_id` (Number) The ID of the user.
|
|
||||||
- `username` (String) The name of the user.
|
|
||||||
54
docs/data-sources/sqlserverflexbeta_flavor.md
Normal file
54
docs/data-sources/sqlserverflexbeta_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) Defines the node type; it can be either Single or HA.
|
||||||
|
- `project_id` (String) The project ID of the flavor.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The region of the flavor.
|
||||||
|
- `storage_class` (String) The storage class of the instance.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `id` (String) The id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) The list of storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
|
|
@ -54,4 +54,4 @@ import {
|
||||||
|
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
||||||
- `id` (Number) The id of the database.
|
- `id` (String) The id of the database.
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,33 @@ description: |-
|
||||||
## Example Usage
|
## Example Usage
|
||||||
|
|
||||||
```terraform
|
```terraform
|
||||||
|
# NOTE: flavor handling will change in future
|
||||||
|
# V2 compatible flavor usage (example without encryption)
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 30
|
||||||
|
flavor = {
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
}
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
# future use of flavor (implemented in V3 API)
|
||||||
|
# first determine flavor and then use the flavor_id
|
||||||
|
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
name = "example-instance"
|
name = "example-instance"
|
||||||
|
|
@ -60,11 +87,10 @@ import {
|
||||||
### Required
|
### Required
|
||||||
|
|
||||||
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
|
||||||
- `name` (String) The name of the instance.
|
- `name` (String) The name of the instance.
|
||||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
- `replicas` (Number) How many replicas the instance should have.
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 90 days.
|
||||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
|
@ -73,6 +99,8 @@ import {
|
||||||
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor` (Attributes, Deprecated) (see [below for nested schema](#nestedatt--flavor))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
- `instance_id` (String) The ID of the instance.
|
- `instance_id` (String) The ID of the instance.
|
||||||
- `project_id` (String) The STACKIT project ID.
|
- `project_id` (String) The STACKIT project ID.
|
||||||
- `region` (String) The region which should be addressed
|
- `region` (String) The region which should be addressed
|
||||||
|
|
@ -122,6 +150,20 @@ Required:
|
||||||
- `service_account` (String)
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--flavor"></a>
|
||||||
|
### Nested Schema for `flavor`
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `cpu` (Number, Deprecated)
|
||||||
|
- `ram` (Number, Deprecated)
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `description` (String)
|
||||||
|
- `id` (String)
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--connection_info"></a>
|
<a id="nestedatt--connection_info"></a>
|
||||||
### Nested Schema for `connection_info`
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -54,6 +54,6 @@ import {
|
||||||
|
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
||||||
- `id` (Number) The ID of the user.
|
- `id` (String) The ID of the user.
|
||||||
- `password` (String) The password for the user.
|
- `password` (String) The password for the user.
|
||||||
- `status` (String) The current status of the user.
|
- `status` (String) The current status of the user.
|
||||||
|
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_database (Resource)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
collation = ""
|
|
||||||
compatibility = "160"
|
|
||||||
name = ""
|
|
||||||
owner = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
# Only use the import statement, if you want to import an existing sqlserverflex database
|
|
||||||
import {
|
|
||||||
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
|
||||||
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
|
||||||
}
|
|
||||||
|
|
||||||
import {
|
|
||||||
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
|
||||||
identity = {
|
|
||||||
project_id = "project.id"
|
|
||||||
region = "region"
|
|
||||||
instance_id = "instance.id"
|
|
||||||
database_id = "database.id"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `name` (String) The name of the database.
|
|
||||||
- `owner` (String) The owner of the database.
|
|
||||||
|
|
||||||
### Optional
|
|
||||||
|
|
||||||
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
|
||||||
- `compatibility` (Number) CompatibilityLevel of the Database.
|
|
||||||
- `database_name` (String) The name of the database.
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
|
||||||
- `project_id` (String) The STACKIT project ID.
|
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
|
||||||
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
|
||||||
- `id` (Number) The id of the database.
|
|
||||||
|
|
@ -1,103 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
name = "example-instance"
|
|
||||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
|
||||||
backup_schedule = "00 00 * * *"
|
|
||||||
flavor = {
|
|
||||||
cpu = 4
|
|
||||||
ram = 16
|
|
||||||
}
|
|
||||||
storage = {
|
|
||||||
class = "class"
|
|
||||||
size = 5
|
|
||||||
}
|
|
||||||
version = 2022
|
|
||||||
}
|
|
||||||
|
|
||||||
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
|
||||||
import {
|
|
||||||
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
|
||||||
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
|
||||||
- `name` (String) The name of the instance.
|
|
||||||
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
|
||||||
- `retention_days` (Number) The number of days for how long the backup files should be stored before being cleaned up. 30 to 365
|
|
||||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
|
||||||
- `version` (String) The sqlserver version used for the instance.
|
|
||||||
|
|
||||||
### Optional
|
|
||||||
|
|
||||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
|
||||||
- `project_id` (String) The STACKIT project ID.
|
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `edition` (String) Edition of the MSSQL server instance
|
|
||||||
- `id` (String) The ID of the instance.
|
|
||||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
|
||||||
- `replicas` (Number) How many replicas the instance should have.
|
|
||||||
- `status` (String)
|
|
||||||
|
|
||||||
<a id="nestedatt--network"></a>
|
|
||||||
### Nested Schema for `network`
|
|
||||||
|
|
||||||
Required:
|
|
||||||
|
|
||||||
- `acl` (List of String) List of IPV4 cidr.
|
|
||||||
|
|
||||||
Optional:
|
|
||||||
|
|
||||||
- `access_scope` (String) The network access scope of the instance
|
|
||||||
|
|
||||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `instance_address` (String)
|
|
||||||
- `router_address` (String)
|
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--storage"></a>
|
|
||||||
### Nested Schema for `storage`
|
|
||||||
|
|
||||||
Required:
|
|
||||||
|
|
||||||
- `class` (String) The storage class for the storage.
|
|
||||||
- `size` (Number) The storage size in Gigabytes.
|
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--encryption"></a>
|
|
||||||
### Nested Schema for `encryption`
|
|
||||||
|
|
||||||
Required:
|
|
||||||
|
|
||||||
- `kek_key_id` (String) The key identifier
|
|
||||||
- `kek_key_ring_id` (String) The keyring identifier
|
|
||||||
- `kek_key_version` (String) The key version
|
|
||||||
- `service_account` (String)
|
|
||||||
|
|
@ -1,53 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_user (Resource)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
|
||||||
username = "username"
|
|
||||||
roles = ["role"]
|
|
||||||
}
|
|
||||||
|
|
||||||
# Only use the import statement, if you want to import an existing sqlserverflex user
|
|
||||||
import {
|
|
||||||
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
|
||||||
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
|
||||||
- `username` (String) The name of the user.
|
|
||||||
|
|
||||||
### Optional
|
|
||||||
|
|
||||||
- `default_database` (String) The default database for a user of the instance.
|
|
||||||
- `instance_id` (String) The ID of the instance.
|
|
||||||
- `project_id` (String) The STACKIT project ID.
|
|
||||||
- `region` (String) The region which should be addressed
|
|
||||||
- `user_id` (Number) The ID of the user.
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `host` (String) The host of the instance in which the user belongs to.
|
|
||||||
- `id` (Number) The ID of the user.
|
|
||||||
- `password` (String) The password for the user.
|
|
||||||
- `port` (Number) The port of the instance in which the user belongs to.
|
|
||||||
- `status` (String) The current status of the user.
|
|
||||||
- `uri` (String) The connection string for the user to the instance.
|
|
||||||
|
|
@ -13,6 +13,31 @@ description: |-
|
||||||
## Example Usage
|
## Example Usage
|
||||||
|
|
||||||
```terraform
|
```terraform
|
||||||
|
# NOTE: flavor handling will change in future
|
||||||
|
# V2 compatible flavor usage
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor = {
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
}
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# future use of flavor (implemented in V3 API)
|
||||||
|
# first determine flavor and then use the flavor_id
|
||||||
|
|
||||||
# without encryption and SNA
|
# without encryption and SNA
|
||||||
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
|
@ -97,7 +122,6 @@ import {
|
||||||
### Required
|
### Required
|
||||||
|
|
||||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
|
||||||
- `name` (String) The name of the instance.
|
- `name` (String) The name of the instance.
|
||||||
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
|
@ -107,6 +131,8 @@ import {
|
||||||
### Optional
|
### Optional
|
||||||
|
|
||||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor` (Attributes, Deprecated) (see [below for nested schema](#nestedatt--flavor))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
- `instance_id` (String) The ID of the instance.
|
- `instance_id` (String) The ID of the instance.
|
||||||
- `project_id` (String) The STACKIT project ID.
|
- `project_id` (String) The STACKIT project ID.
|
||||||
- `region` (String) The region which should be addressed
|
- `region` (String) The region which should be addressed
|
||||||
|
|
@ -156,3 +182,17 @@ Required:
|
||||||
- `kek_key_ring_id` (String) The keyring identifier
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
- `kek_key_version` (String) The key version
|
- `kek_key_version` (String) The key version
|
||||||
- `service_account` (String)
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--flavor"></a>
|
||||||
|
### Nested Schema for `flavor`
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `cpu` (Number, Deprecated)
|
||||||
|
- `ram` (Number, Deprecated)
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `description` (String)
|
||||||
|
- `id` (String)
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,30 @@
|
||||||
|
# NOTE: flavor handling will change in future
|
||||||
|
# V2 compatible flavor usage (example without encryption)
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 30
|
||||||
|
flavor = {
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
}
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
# future use of flavor (implemented in V3 API)
|
||||||
|
# first determine flavor and then use the flavor_id
|
||||||
|
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
name = "example-instance"
|
name = "example-instance"
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,28 @@
|
||||||
|
# NOTE: flavor handling will change in future
|
||||||
|
# V2 compatible flavor usage
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor = {
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
}
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# future use of flavor (implemented in V3 API)
|
||||||
|
# first determine flavor and then use the flavor_id
|
||||||
|
|
||||||
# without encryption and SNA
|
# without encryption and SNA
|
||||||
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
|
|
||||||
|
|
@ -8,10 +8,11 @@ import (
|
||||||
"go/token"
|
"go/token"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
|
||||||
"path"
|
"path"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/tools"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Builder struct {
|
type Builder struct {
|
||||||
|
|
@ -276,20 +277,14 @@ func handleLine(line string) (string, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *Builder) determineRoot() error {
|
func (b *Builder) determineRoot() error {
|
||||||
cmd := exec.Command("git", "rev-parse", "--show-toplevel")
|
root, err := tools.GetGitRoot()
|
||||||
out, err := cmd.Output()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
lines := strings.Split(string(out), "\n")
|
b.rootDir = root
|
||||||
if lines[0] == "" {
|
|
||||||
return fmt.Errorf("unable to determine root directory from git")
|
|
||||||
}
|
|
||||||
b.rootDir = lines[0]
|
|
||||||
if b.Verbose {
|
if b.Verbose {
|
||||||
slog.Info(" ... using root", "dir", b.rootDir)
|
slog.Info(" ... using root", "dir", b.rootDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
247
generator/cmd/docCmd.go
Normal file
247
generator/cmd/docCmd.go
Normal file
|
|
@ -0,0 +1,247 @@
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/tools"
|
||||||
|
)
|
||||||
|
|
||||||
|
// outFile is the destination path of the generated navigation markdown;
// set via the --outFile/-o flag (default "nav.md", see init below).
var outFile string
|
||||||
|
|
||||||
|
// docsCmd implements the "docs" subcommand: it scans the generated
// docs/ tree and renders a navigation markdown page (see workDocs).
var docsCmd = &cobra.Command{
	Use:   "docs",
	Short: "handle documentation",
	Long:  `...`,
	RunE: func(_ *cobra.Command, _ []string) error {
		// NOTE(review): a large block of commented-out yaegi interpreter
		// experiments (evaluating generated *_gen.go schema files at
		// runtime) used to live here; it has been condensed to this note.
		// Recover it from version control if that approach is revisited.
		return workDocs()
	},
}
|
||||||
|
|
||||||
|
// NavDocs is the root context rendered into the nav.md.gompl template.
type NavDocs struct {
	PageTitle       string
	Description     string
	NavigationTitle string
	ProviderTitle   string
	IndexFound      bool      // true when docs/index.md was found on disk
	Services        []Service // one entry per service, sorted by name (see workDocs)
}
|
||||||
|
|
||||||
|
// Service groups the documented items of one service for the template.
type Service struct {
	ServiceTitle string    // service key taken from the file name, e.g. "postgresflexalpha"
	DataSources  []ResItem // entries found under docs/data-sources
	Resources    []ResItem // entries found under docs/resources
}
|
||||||
|
|
||||||
|
// ResItem is one resource or data-source link in the navigation page.
type ResItem struct {
	ItemName string // item part of the file name, e.g. "instance"
	ItemLink string // site-relative link: /docs/docs/<kind>/<file>.md
}
|
||||||
|
|
||||||
|
func workDocs() error {
|
||||||
|
slog.Info("creating docs navigation")
|
||||||
|
root, err := tools.GetGitRoot()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("ERROR", "err", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
nav := NavDocs{
|
||||||
|
PageTitle: "STACKIT terraform provider PRIVATE-PREVIEW",
|
||||||
|
Description: "",
|
||||||
|
NavigationTitle: "Navigation",
|
||||||
|
ProviderTitle: "Provider",
|
||||||
|
IndexFound: false,
|
||||||
|
}
|
||||||
|
startPath := path.Join(root, "docs")
|
||||||
|
|
||||||
|
docs, err := os.ReadDir(startPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
services := make(map[string]Service)
|
||||||
|
dataSources := make(map[string][]ResItem)
|
||||||
|
resources := make(map[string][]ResItem)
|
||||||
|
|
||||||
|
for _, entry := range docs {
|
||||||
|
if !entry.IsDir() {
|
||||||
|
if entry.Name() == "index.md" {
|
||||||
|
slog.Debug(" found provider index file")
|
||||||
|
nav.IndexFound = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
slog.Debug(" found am ignored file", "fileName", entry.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.Name() != "data-sources" && entry.Name() != "resources" {
|
||||||
|
slog.Error("unable to handle entry, skipping", "entry", entry.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
elements, err := os.ReadDir(path.Join(startPath, entry.Name()))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, res := range elements {
|
||||||
|
if res.IsDir() {
|
||||||
|
slog.Warn("found unexpected directory", "dir", res.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
re := regexp.MustCompile(`([a-z]+)_([a-z]+).md`)
|
||||||
|
matches := re.FindAllStringSubmatch(res.Name(), -1)
|
||||||
|
if matches == nil {
|
||||||
|
slog.Error("unable to identify resource", "item", res.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
services[matches[0][1]] = Service{
|
||||||
|
ServiceTitle: matches[0][1],
|
||||||
|
}
|
||||||
|
switch entry.Name() {
|
||||||
|
case "data-sources":
|
||||||
|
dataSources[matches[0][1]] = append(dataSources[matches[0][1]], ResItem{
|
||||||
|
ItemName: matches[0][2],
|
||||||
|
ItemLink: fmt.Sprintf("/docs/docs/%s/%s", entry.Name(), matches[0][0]),
|
||||||
|
})
|
||||||
|
case "resources":
|
||||||
|
resources[matches[0][1]] = append(resources[matches[0][1]], ResItem{
|
||||||
|
ItemName: matches[0][2],
|
||||||
|
ItemLink: fmt.Sprintf("/docs/docs/%s/%s", entry.Name(), matches[0][0]),
|
||||||
|
})
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("this should never have happened")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
keys := make([]string, 0, len(services))
|
||||||
|
for k := range services {
|
||||||
|
keys = append(keys, k)
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
|
||||||
|
for _, name := range keys {
|
||||||
|
item := services[name]
|
||||||
|
item.DataSources = dataSources[name]
|
||||||
|
item.Resources = resources[name]
|
||||||
|
nav.Services = append(nav.Services, item)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn := template.FuncMap{
|
||||||
|
"ucfirst": ucfirst,
|
||||||
|
}
|
||||||
|
|
||||||
|
tmpl, err := template.
|
||||||
|
New("nav.md.gompl").
|
||||||
|
Funcs(fn).
|
||||||
|
ParseFiles(path.Join(root, "generator", "cmd", "docs", "templates", "nav.md.gompl"))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var f *os.File
|
||||||
|
f, err = os.Create(outFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tmpl.Execute(f, nav)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = f.Close()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
slog.Info("finished")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDocsCmd returns the "docs" subcommand so main can register it on
// the root command. Note it hands out the shared package-level
// instance, not a fresh copy.
func NewDocsCmd() *cobra.Command {
	return docsCmd
}
|
||||||
|
|
||||||
|
// ucfirst upper-cases the first byte of s (ASCII-oriented: a multibyte
// first rune is left untouched byte-wise) and returns the empty string
// unchanged. Exposed to the navigation template as "ucfirst".
func ucfirst(s string) string {
	if len(s) == 0 {
		return s
	}
	head, tail := s[:1], s[1:]
	return strings.ToUpper(head) + tail
}
|
||||||
|
|
||||||
|
func init() { // nolint: gochecknoinits
|
||||||
|
docsCmd.Flags().StringVarP(&outFile, "outFile", "o", "nav.md", "nav.md")
|
||||||
|
}
|
||||||
27
generator/cmd/docs/templates/nav.md.gompl
vendored
Normal file
27
generator/cmd/docs/templates/nav.md.gompl
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
||||||
|
---
|
||||||
|
page_title: {{ .PageTitle }}
|
||||||
|
description: {{ .Description }}
|
||||||
|
---
|
||||||
|
## {{ .NavigationTitle }}
|
||||||
|
### {{ .ProviderTitle }}
|
||||||
|
{{ if .IndexFound }}
|
||||||
|
[Provider](/docs/docs/index.md)
|
||||||
|
{{ end }}
|
||||||
|
{{- range $index, $service := .Services }}
|
||||||
|
### {{ $service.ServiceTitle }}
|
||||||
|
<details>
|
||||||
|
|
||||||
|
#### data sources
|
||||||
|
|
||||||
|
{{- range $service.DataSources }}
|
||||||
|
- [{{ .ItemName }}]({{ .ItemLink }})
|
||||||
|
{{- end }}
|
||||||
|
|
||||||
|
#### resources
|
||||||
|
|
||||||
|
{{- range $service.Resources }}
|
||||||
|
- [{{ .ItemName }}]({{ .ItemLink }})
|
||||||
|
{{- end }}
|
||||||
|
</details>
|
||||||
|
|
||||||
|
{{ end }}
|
||||||
|
|
@ -6,7 +6,7 @@ import (
|
||||||
|
|
||||||
func NewRootCmd() *cobra.Command {
|
func NewRootCmd() *cobra.Command {
|
||||||
return &cobra.Command{
|
return &cobra.Command{
|
||||||
Use: "build-tools",
|
Use: "generator",
|
||||||
Short: "...",
|
Short: "...",
|
||||||
Long: "...",
|
Long: "...",
|
||||||
SilenceErrors: true, // Error is beautified in a custom way before being printed
|
SilenceErrors: true, // Error is beautified in a custom way before being printed
|
||||||
|
|
|
||||||
20
generator/cmd/tools/tools.go
Normal file
20
generator/cmd/tools/tools.go
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
package tools
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GetGitRoot returns the absolute path of the enclosing git working
// tree, as printed by "git rev-parse --show-toplevel".
//
// It fails when git is missing, the process is not inside a repository,
// or the command prints nothing.
func GetGitRoot() (string, error) {
	cmd := exec.Command("git", "rev-parse", "--show-toplevel")
	out, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("running git rev-parse: %w", err)
	}
	// TrimSpace rather than taking the raw first line: it strips the
	// trailing newline and also the "\r" git emits on Windows, which the
	// previous Split-on-"\n" approach left attached to the path.
	root := strings.TrimSpace(string(out))
	if root == "" {
		return "", fmt.Errorf("unable to determine root directory from git")
	}
	return root, nil
}
|
||||||
|
|
@ -31,6 +31,7 @@ func main() {
|
||||||
cmd.NewPublishCmd(),
|
cmd.NewPublishCmd(),
|
||||||
cmd.NewGetFieldsCmd(),
|
cmd.NewGetFieldsCmd(),
|
||||||
cmd.NewExamplesCmd(),
|
cmd.NewExamplesCmd(),
|
||||||
|
cmd.NewDocsCmd(),
|
||||||
)
|
)
|
||||||
|
|
||||||
err := rootCmd.Execute()
|
err := rootCmd.Execute()
|
||||||
|
|
|
||||||
253
go.mod
253
go.mod
|
|
@ -1,26 +1,25 @@
|
||||||
module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
|
|
||||||
go 1.25.6
|
go 1.26.2
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/SladkyCitron/slogcolor v1.8.0
|
github.com/SladkyCitron/slogcolor v1.9.0
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
|
||||||
github.com/google/go-cmp v0.7.0
|
github.com/google/go-cmp v0.7.0
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.18.0
|
github.com/hashicorp/terraform-plugin-framework v1.19.0
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
||||||
github.com/hashicorp/terraform-plugin-go v0.30.0
|
github.com/hashicorp/terraform-plugin-go v0.31.0
|
||||||
github.com/hashicorp/terraform-plugin-log v0.10.0
|
github.com/hashicorp/terraform-plugin-log v0.10.0
|
||||||
github.com/hashicorp/terraform-plugin-testing v1.14.0
|
github.com/hashicorp/terraform-plugin-testing v1.16.0
|
||||||
github.com/iancoleman/strcase v0.3.0
|
github.com/iancoleman/strcase v0.3.0
|
||||||
github.com/ivanpirog/coloredcobra v1.0.1
|
github.com/ivanpirog/coloredcobra v1.0.1
|
||||||
github.com/jarcoal/httpmock v1.4.1
|
github.com/jarcoal/httpmock v1.4.1
|
||||||
github.com/joho/godotenv v1.5.1
|
github.com/joho/godotenv v1.5.1
|
||||||
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
|
github.com/ldez/go-git-cmd-wrapper/v2 v2.9.1
|
||||||
github.com/spf13/cobra v1.10.2
|
github.com/spf13/cobra v1.10.2
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.22.0
|
github.com/stackitcloud/stackit-sdk-go/core v0.26.0
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.4.0
|
github.com/stackitcloud/stackit-sdk-go/services/postgresflex v1.8.0
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.5.0
|
github.com/stackitcloud/stackit-sdk-go/services/sqlserverflex v1.10.0
|
||||||
github.com/teambition/rrule-go v1.8.2
|
github.com/teambition/rrule-go v1.8.2
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
@ -28,261 +27,77 @@ require (
|
||||||
require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
||||||
|
|
||||||
require (
|
require (
|
||||||
4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
|
|
||||||
4d63.com/gochecknoglobals v0.2.2 // indirect
|
|
||||||
codeberg.org/chavacava/garif v0.2.0 // indirect
|
|
||||||
codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
|
|
||||||
dario.cat/mergo v1.0.1 // indirect
|
dario.cat/mergo v1.0.1 // indirect
|
||||||
dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect
|
github.com/BurntSushi/toml v1.2.1 // indirect
|
||||||
dev.gaijin.team/go/golib v0.6.0 // indirect
|
|
||||||
github.com/4meepo/tagalign v1.4.3 // indirect
|
|
||||||
github.com/Abirdcfly/dupword v0.1.7 // indirect
|
|
||||||
github.com/AdminBenni/iota-mixing v1.0.0 // indirect
|
|
||||||
github.com/AlwxSin/noinlineerr v1.0.5 // indirect
|
|
||||||
github.com/Antonboom/errname v1.1.1 // indirect
|
|
||||||
github.com/Antonboom/nilnil v1.1.1 // indirect
|
|
||||||
github.com/Antonboom/testifylint v1.6.4 // indirect
|
|
||||||
github.com/BurntSushi/toml v1.6.0 // indirect
|
|
||||||
github.com/Djarvur/go-err113 v0.1.1 // indirect
|
|
||||||
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
|
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
|
||||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||||
github.com/Masterminds/semver/v3 v3.4.0 // indirect
|
github.com/Masterminds/semver/v3 v3.2.0 // indirect
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
||||||
github.com/MirrexOne/unqueryvet v1.5.4 // indirect
|
github.com/ProtonMail/go-crypto v1.4.1 // indirect
|
||||||
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
|
|
||||||
github.com/ProtonMail/go-crypto v1.4.0 // indirect
|
|
||||||
github.com/agext/levenshtein v1.2.3 // indirect
|
github.com/agext/levenshtein v1.2.3 // indirect
|
||||||
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
|
|
||||||
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
|
|
||||||
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
|
|
||||||
github.com/alexkohler/prealloc v1.1.0 // indirect
|
|
||||||
github.com/alfatraining/structtag v1.0.0 // indirect
|
|
||||||
github.com/alingse/asasalint v0.0.11 // indirect
|
|
||||||
github.com/alingse/nilnesserr v0.2.0 // indirect
|
|
||||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||||
github.com/armon/go-radix v1.0.0 // indirect
|
github.com/armon/go-radix v1.0.0 // indirect
|
||||||
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
|
|
||||||
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
|
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
|
||||||
github.com/beorn7/perks v1.0.1 // indirect
|
|
||||||
github.com/bgentry/speakeasy v0.1.0 // indirect
|
github.com/bgentry/speakeasy v0.1.0 // indirect
|
||||||
github.com/bkielbasa/cyclop v1.2.3 // indirect
|
github.com/bmatcuk/doublestar/v4 v4.10.0 // indirect
|
||||||
github.com/blizzy78/varnamelen v0.8.0 // indirect
|
|
||||||
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
|
|
||||||
github.com/bombsimon/wsl/v4 v4.7.0 // indirect
|
|
||||||
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
|
|
||||||
github.com/breml/bidichk v0.3.3 // indirect
|
|
||||||
github.com/breml/errchkjson v0.4.1 // indirect
|
|
||||||
github.com/butuzov/ireturn v0.4.0 // indirect
|
|
||||||
github.com/butuzov/mirror v1.3.0 // indirect
|
|
||||||
github.com/catenacyber/perfsprint v0.10.1 // indirect
|
|
||||||
github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
|
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
|
||||||
github.com/charithe/durationcheck v0.0.11 // indirect
|
|
||||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
|
|
||||||
github.com/charmbracelet/lipgloss v1.1.0 // indirect
|
|
||||||
github.com/charmbracelet/x/ansi v0.10.1 // indirect
|
|
||||||
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
|
|
||||||
github.com/charmbracelet/x/term v0.2.1 // indirect
|
|
||||||
github.com/ckaznocha/intrange v0.3.1 // indirect
|
|
||||||
github.com/cloudflare/circl v1.6.3 // indirect
|
github.com/cloudflare/circl v1.6.3 // indirect
|
||||||
github.com/curioswitch/go-reassign v0.3.0 // indirect
|
github.com/fatih/color v1.19.0 // indirect
|
||||||
github.com/daixiang0/gci v0.13.7 // indirect
|
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||||
github.com/dave/dst v0.27.3 // indirect
|
|
||||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
|
||||||
github.com/denis-tingaikin/go-header v0.5.0 // indirect
|
|
||||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
|
||||||
github.com/ettle/strcase v0.2.0 // indirect
|
|
||||||
github.com/fatih/color v1.18.0 // indirect
|
|
||||||
github.com/fatih/structtag v1.2.0 // indirect
|
|
||||||
github.com/firefart/nonamedreturns v1.0.6 // indirect
|
|
||||||
github.com/fsnotify/fsnotify v1.5.4 // indirect
|
|
||||||
github.com/fzipp/gocyclo v0.6.0 // indirect
|
|
||||||
github.com/ghostiam/protogetter v0.3.20 // indirect
|
|
||||||
github.com/go-critic/go-critic v0.14.3 // indirect
|
|
||||||
github.com/go-toolsmith/astcast v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/astcopy v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/astequal v1.2.0 // indirect
|
|
||||||
github.com/go-toolsmith/astfmt v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/astp v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/strparse v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/typep v1.1.0 // indirect
|
|
||||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
|
||||||
github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
|
|
||||||
github.com/gobwas/glob v0.2.3 // indirect
|
|
||||||
github.com/godoc-lint/godoc-lint v0.11.2 // indirect
|
|
||||||
github.com/gofrs/flock v0.13.0 // indirect
|
|
||||||
github.com/golang/protobuf v1.5.4 // indirect
|
github.com/golang/protobuf v1.5.4 // indirect
|
||||||
github.com/golangci/asciicheck v0.5.0 // indirect
|
|
||||||
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
|
|
||||||
github.com/golangci/go-printf-func-name v0.1.1 // indirect
|
|
||||||
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
|
|
||||||
github.com/golangci/golangci-lint/v2 v2.11.2 // indirect
|
|
||||||
github.com/golangci/golines v0.15.0 // indirect
|
|
||||||
github.com/golangci/misspell v0.8.0 // indirect
|
|
||||||
github.com/golangci/plugin-module-register v0.1.2 // indirect
|
|
||||||
github.com/golangci/revgrep v0.8.0 // indirect
|
|
||||||
github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
|
|
||||||
github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
|
|
||||||
github.com/gordonklaus/ineffassign v0.2.0 // indirect
|
|
||||||
github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
|
|
||||||
github.com/gostaticanalysis/comment v1.5.0 // indirect
|
|
||||||
github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
|
|
||||||
github.com/gostaticanalysis/nilerr v0.1.2 // indirect
|
|
||||||
github.com/hashicorp/cli v1.1.7 // indirect
|
github.com/hashicorp/cli v1.1.7 // indirect
|
||||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||||
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
|
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
|
||||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||||
github.com/hashicorp/go-cty v1.5.0 // indirect
|
github.com/hashicorp/go-cty v1.5.0 // indirect
|
||||||
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
||||||
github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
|
|
||||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||||
github.com/hashicorp/go-plugin v1.7.0 // indirect
|
github.com/hashicorp/go-plugin v1.8.0 // indirect
|
||||||
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
github.com/hashicorp/go-version v1.9.0 // indirect
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
github.com/hashicorp/hc-install v0.9.5 // indirect
|
||||||
github.com/hashicorp/hc-install v0.9.3 // indirect
|
|
||||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
|
||||||
github.com/hashicorp/hcl/v2 v2.24.0 // indirect
|
github.com/hashicorp/hcl/v2 v2.24.0 // indirect
|
||||||
github.com/hashicorp/logutils v1.0.0 // indirect
|
github.com/hashicorp/logutils v1.0.0 // indirect
|
||||||
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
github.com/hashicorp/terraform-exec v0.25.2 // indirect
|
||||||
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
github.com/hashicorp/terraform-json v0.27.3-0.20260213134036-298b8f6b673a // indirect
|
||||||
github.com/hashicorp/terraform-plugin-docs v0.24.0 // indirect
|
github.com/hashicorp/terraform-plugin-docs v0.25.0 // indirect
|
||||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.39.0 // indirect
|
github.com/hashicorp/terraform-plugin-sdk/v2 v2.40.1 // indirect
|
||||||
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
||||||
github.com/hashicorp/terraform-svchost v0.2.1 // indirect
|
github.com/hashicorp/terraform-svchost v0.2.1 // indirect
|
||||||
github.com/hashicorp/yamux v0.1.2 // indirect
|
github.com/hashicorp/yamux v0.1.2 // indirect
|
||||||
github.com/hexops/gotextdiff v1.0.3 // indirect
|
|
||||||
github.com/huandu/xstrings v1.3.3 // indirect
|
github.com/huandu/xstrings v1.3.3 // indirect
|
||||||
github.com/imdario/mergo v0.3.15 // indirect
|
github.com/imdario/mergo v0.3.15 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jgautheron/goconst v1.8.2 // indirect
|
|
||||||
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
|
|
||||||
github.com/jjti/go-spancheck v0.6.5 // indirect
|
|
||||||
github.com/julz/importas v0.2.0 // indirect
|
|
||||||
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
|
|
||||||
github.com/kisielk/errcheck v1.10.0 // indirect
|
|
||||||
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
|
|
||||||
github.com/kr/text v0.2.0 // indirect
|
|
||||||
github.com/kulti/thelper v0.7.1 // indirect
|
|
||||||
github.com/kunwardeep/paralleltest v1.0.15 // indirect
|
|
||||||
github.com/lasiar/canonicalheader v1.1.2 // indirect
|
|
||||||
github.com/ldez/exptostd v0.4.5 // indirect
|
|
||||||
github.com/ldez/gomoddirectives v0.8.0 // indirect
|
|
||||||
github.com/ldez/grignotin v0.10.1 // indirect
|
|
||||||
github.com/ldez/structtags v0.6.1 // indirect
|
|
||||||
github.com/ldez/tagliatelle v0.7.2 // indirect
|
|
||||||
github.com/ldez/usetesting v0.5.0 // indirect
|
|
||||||
github.com/leonklingele/grouper v1.1.2 // indirect
|
|
||||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
|
||||||
github.com/macabu/inamedparam v0.2.0 // indirect
|
|
||||||
github.com/magiconair/properties v1.8.6 // indirect
|
|
||||||
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
|
|
||||||
github.com/manuelarte/funcorder v0.5.0 // indirect
|
|
||||||
github.com/maratori/testableexamples v1.0.1 // indirect
|
|
||||||
github.com/maratori/testpackage v1.1.2 // indirect
|
|
||||||
github.com/matoous/godox v1.1.0 // indirect
|
|
||||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.22 // indirect
|
||||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
github.com/mattn/go-runewidth v0.0.9 // indirect
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
|
|
||||||
github.com/mgechev/revive v1.15.0 // indirect
|
|
||||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
|
||||||
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
||||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||||
github.com/moricho/tparallel v0.3.2 // indirect
|
|
||||||
github.com/muesli/termenv v0.16.0 // indirect
|
|
||||||
github.com/nakabonne/nestif v0.3.1 // indirect
|
|
||||||
github.com/nishanths/exhaustive v0.12.0 // indirect
|
|
||||||
github.com/nishanths/predeclared v0.2.2 // indirect
|
|
||||||
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
|
|
||||||
github.com/oklog/run v1.2.0 // indirect
|
github.com/oklog/run v1.2.0 // indirect
|
||||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
|
||||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
|
||||||
github.com/posener/complete v1.2.3 // indirect
|
github.com/posener/complete v1.2.3 // indirect
|
||||||
github.com/prometheus/client_golang v1.12.1 // indirect
|
|
||||||
github.com/prometheus/client_model v0.2.0 // indirect
|
|
||||||
github.com/prometheus/common v0.32.1 // indirect
|
|
||||||
github.com/prometheus/procfs v0.7.3 // indirect
|
|
||||||
github.com/quasilyte/go-ruleguard v0.4.5 // indirect
|
|
||||||
github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
|
|
||||||
github.com/quasilyte/gogrep v0.5.0 // indirect
|
|
||||||
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
|
|
||||||
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
|
|
||||||
github.com/raeperd/recvcheck v0.2.0 // indirect
|
|
||||||
github.com/rivo/uniseg v0.4.7 // indirect
|
|
||||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
|
||||||
github.com/ryancurrah/gomodguard v1.4.1 // indirect
|
|
||||||
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
|
|
||||||
github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
|
|
||||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
|
|
||||||
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
|
|
||||||
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
|
|
||||||
github.com/securego/gosec/v2 v2.24.7 // indirect
|
|
||||||
github.com/shopspring/decimal v1.3.1 // indirect
|
github.com/shopspring/decimal v1.3.1 // indirect
|
||||||
github.com/sirupsen/logrus v1.9.4 // indirect
|
|
||||||
github.com/sivchari/containedctx v1.0.3 // indirect
|
|
||||||
github.com/sonatard/noctx v0.5.0 // indirect
|
|
||||||
github.com/sourcegraph/go-diff v0.7.0 // indirect
|
|
||||||
github.com/spf13/afero v1.15.0 // indirect
|
|
||||||
github.com/spf13/cast v1.5.0 // indirect
|
github.com/spf13/cast v1.5.0 // indirect
|
||||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
|
||||||
github.com/spf13/pflag v1.0.10 // indirect
|
github.com/spf13/pflag v1.0.10 // indirect
|
||||||
github.com/spf13/viper v1.12.0 // indirect
|
|
||||||
github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
|
|
||||||
github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
|
|
||||||
github.com/stretchr/objx v0.5.2 // indirect
|
|
||||||
github.com/stretchr/testify v1.11.1 // indirect
|
github.com/stretchr/testify v1.11.1 // indirect
|
||||||
github.com/subosito/gotenv v1.4.1 // indirect
|
|
||||||
github.com/tetafro/godot v1.5.4 // indirect
|
|
||||||
github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
|
|
||||||
github.com/timonwong/loggercheck v0.11.0 // indirect
|
|
||||||
github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
|
|
||||||
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
|
|
||||||
github.com/ultraware/funlen v0.2.0 // indirect
|
|
||||||
github.com/ultraware/whitespace v0.2.0 // indirect
|
|
||||||
github.com/uudashr/gocognit v1.2.1 // indirect
|
|
||||||
github.com/uudashr/iface v1.4.1 // indirect
|
|
||||||
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
github.com/xen0n/gosmopolitan v1.3.0 // indirect
|
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
|
||||||
github.com/yagipy/maintidx v1.0.0 // indirect
|
|
||||||
github.com/yeya24/promlinter v0.3.0 // indirect
|
|
||||||
github.com/ykadowak/zerologlint v0.1.5 // indirect
|
|
||||||
github.com/yuin/goldmark v1.7.7 // indirect
|
github.com/yuin/goldmark v1.7.7 // indirect
|
||||||
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
||||||
github.com/zclconf/go-cty v1.18.0 // indirect
|
github.com/zclconf/go-cty v1.18.1 // indirect
|
||||||
gitlab.com/bosi/decorder v0.4.2 // indirect
|
|
||||||
go-simpler.org/musttag v0.14.0 // indirect
|
|
||||||
go-simpler.org/sloglint v0.11.1 // indirect
|
|
||||||
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
|
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
|
||||||
go.augendre.info/arangolint v0.4.0 // indirect
|
golang.org/x/crypto v0.50.0 // indirect
|
||||||
go.augendre.info/fatcontext v0.9.0 // indirect
|
golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect
|
||||||
go.uber.org/multierr v1.10.0 // indirect
|
golang.org/x/mod v0.35.0 // indirect
|
||||||
go.uber.org/zap v1.27.0 // indirect
|
golang.org/x/net v0.53.0 // indirect
|
||||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
golang.org/x/sync v0.20.0 // indirect
|
||||||
golang.org/x/crypto v0.48.0 // indirect
|
golang.org/x/sys v0.43.0 // indirect
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
|
golang.org/x/text v0.36.0 // indirect
|
||||||
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
|
golang.org/x/tools v0.44.0 // indirect
|
||||||
golang.org/x/mod v0.33.0 // indirect
|
|
||||||
golang.org/x/net v0.51.0 // indirect
|
|
||||||
golang.org/x/sync v0.19.0 // indirect
|
|
||||||
golang.org/x/sys v0.41.0 // indirect
|
|
||||||
golang.org/x/text v0.34.0 // indirect
|
|
||||||
golang.org/x/tools v0.42.0 // indirect
|
|
||||||
google.golang.org/appengine v1.6.8 // indirect
|
google.golang.org/appengine v1.6.8 // indirect
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 // indirect
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20260504160031-60b97b32f348 // indirect
|
||||||
google.golang.org/grpc v1.79.2 // indirect
|
google.golang.org/grpc v1.81.0 // indirect
|
||||||
google.golang.org/protobuf v1.36.11 // indirect
|
google.golang.org/protobuf v1.36.11 // indirect
|
||||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
|
||||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||||
honnef.co/go/tools v0.7.0 // indirect
|
|
||||||
mvdan.cc/gofumpt v0.9.2 // indirect
|
|
||||||
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
|
|
||||||
)
|
)
|
||||||
|
|
|
||||||
97
golang-ci.yaml.bak
Normal file
97
golang-ci.yaml.bak
Normal file
|
|
@ -0,0 +1,97 @@
|
||||||
|
|
||||||
|
version: "2"
|
||||||
|
run:
|
||||||
|
concurrency: 4
|
||||||
|
output:
|
||||||
|
formats:
|
||||||
|
text:
|
||||||
|
print-linter-name: true
|
||||||
|
print-issued-lines: true
|
||||||
|
colors: true
|
||||||
|
path: stdout
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- bodyclose
|
||||||
|
- depguard
|
||||||
|
- errorlint
|
||||||
|
- forcetypeassert
|
||||||
|
- gochecknoinits
|
||||||
|
- gocritic
|
||||||
|
- gosec
|
||||||
|
- misspell
|
||||||
|
- nakedret
|
||||||
|
- revive
|
||||||
|
- sqlclosecheck
|
||||||
|
- wastedassign
|
||||||
|
disable:
|
||||||
|
- noctx
|
||||||
|
- unparam
|
||||||
|
settings:
|
||||||
|
depguard:
|
||||||
|
rules:
|
||||||
|
main:
|
||||||
|
list-mode: lax
|
||||||
|
allow:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
|
- github.com/hashicorp/terraform-plugin-framework
|
||||||
|
- github.com/hashicorp/terraform-plugin-log
|
||||||
|
- github.com/stackitcloud/stackit-sdk-go
|
||||||
|
deny:
|
||||||
|
- pkg: github.com/stretchr/testify
|
||||||
|
desc: Do not use a testing framework
|
||||||
|
gocritic:
|
||||||
|
disabled-checks:
|
||||||
|
- wrapperFunc
|
||||||
|
- typeDefFirst
|
||||||
|
- ifElseChain
|
||||||
|
- dupImport
|
||||||
|
- hugeParam
|
||||||
|
enabled-tags:
|
||||||
|
- performance
|
||||||
|
- style
|
||||||
|
- experimental
|
||||||
|
gosec:
|
||||||
|
excludes:
|
||||||
|
- G104
|
||||||
|
- G102
|
||||||
|
- G304
|
||||||
|
- G307
|
||||||
|
misspell:
|
||||||
|
locale: US
|
||||||
|
nakedret:
|
||||||
|
max-func-lines: 0
|
||||||
|
revive:
|
||||||
|
severity: error
|
||||||
|
rules:
|
||||||
|
- name: errorf
|
||||||
|
- name: context-as-argument
|
||||||
|
- name: error-return
|
||||||
|
- name: increment-decrement
|
||||||
|
- name: indent-error-flow
|
||||||
|
- name: superfluous-else
|
||||||
|
- name: unused-parameter
|
||||||
|
- name: unreachable-code
|
||||||
|
- name: atomic
|
||||||
|
- name: empty-lines
|
||||||
|
- name: early-return
|
||||||
|
exclusions:
|
||||||
|
paths:
|
||||||
|
- stackit-sdk-generator/
|
||||||
|
- generated/
|
||||||
|
- pkg_gen/
|
||||||
|
generated: lax
|
||||||
|
warn-unused: true
|
||||||
|
# Excluding configuration per-path, per-linter, per-text and per-source.
|
||||||
|
rules:
|
||||||
|
# Exclude some linters from running on tests files.
|
||||||
|
- path: _test\.go
|
||||||
|
linters:
|
||||||
|
- gochecknoinits
|
||||||
|
formatters:
|
||||||
|
enable:
|
||||||
|
- gofmt
|
||||||
|
- goimports
|
||||||
|
settings:
|
||||||
|
goimports:
|
||||||
|
local-prefixes:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
|
|
@ -53,7 +53,7 @@ func CreateTemporaryHome(createValidCredentialsFile bool, t *testing.T) string {
|
||||||
|
|
||||||
// Define content, default = invalid token
|
// Define content, default = invalid token
|
||||||
token := "foo_token"
|
token := "foo_token"
|
||||||
//if createValidCredentialsFile {
|
// if createValidCredentialsFile {
|
||||||
// token = GetTestProjectServiceAccountJson("")
|
// token = GetTestProjectServiceAccountJson("")
|
||||||
//}
|
//}
|
||||||
if _, err = file.WriteString(token); err != nil {
|
if _, err = file.WriteString(token); err != nil {
|
||||||
|
|
|
||||||
|
|
@ -99,7 +99,7 @@ func ResourceNameWithDateTime(name string) string {
|
||||||
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
|
return fmt.Sprintf("tf-acc-%s-%s", name, dateTimeTrimmed)
|
||||||
}
|
}
|
||||||
|
|
||||||
//func GetTestProjectServiceAccountJson(path string) string {
|
// func GetTestProjectServiceAccountJson(path string) string {
|
||||||
// var err error
|
// var err error
|
||||||
// json, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_JSON_CONTENT")
|
// json, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_JSON_CONTENT")
|
||||||
// if !ok || json == "" {
|
// if !ok || json == "" {
|
||||||
|
|
@ -153,7 +153,7 @@ func ResourceNameWithDateTime(name string) string {
|
||||||
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
|
// return credentials.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN, nil
|
||||||
//}
|
//}
|
||||||
|
|
||||||
//func readTestServiceAccountJsonFromFile(path string) (string, error) {
|
// func readTestServiceAccountJsonFromFile(path string) (string, error) {
|
||||||
// if path == "" {
|
// if path == "" {
|
||||||
// customPath, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_FILE")
|
// customPath, ok := os.LookupEnv("TF_ACC_SERVICE_ACCOUNT_FILE")
|
||||||
// if !ok || customPath == "" {
|
// if !ok || customPath == "" {
|
||||||
|
|
|
||||||
7
sample/.gitignore
vendored
Normal file
7
sample/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
*.json
|
||||||
|
*.bak
|
||||||
|
*.tfstate
|
||||||
|
*.tfstate.backup
|
||||||
|
terraform
|
||||||
|
variables.tf
|
||||||
|
*.tfrc
|
||||||
38
sample/alpha-from-registry/key.tf
Normal file
38
sample/alpha-from-registry/key.tf
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
resource "stackit_kms_keyring" "mshalpha-keyring" {
|
||||||
|
project_id = var.project_id
|
||||||
|
display_name = "msh-alpha-tests"
|
||||||
|
description = "This is a test keyring for private endpoints"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackit_kms_key" "mshalpha-key01" {
|
||||||
|
project_id = var.project_id
|
||||||
|
keyring_id = stackit_kms_keyring.mshalpha-keyring.keyring_id
|
||||||
|
display_name = "mshalpha-key01"
|
||||||
|
protection = "software"
|
||||||
|
algorithm = "aes_256_gcm"
|
||||||
|
purpose = "symmetric_encrypt_decrypt"
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
|
||||||
|
output "keyid" {
|
||||||
|
value = stackit_kms_key.mshalpha-key01.key_id
|
||||||
|
}
|
||||||
|
|
||||||
|
# (because stackit_kms_key.key001 is not in configuration)
|
||||||
|
resource "stackit_kms_key" "key001" {
|
||||||
|
access_scope = "SNA"
|
||||||
|
algorithm = "aes_256_gcm"
|
||||||
|
display_name = "msh-key-sna01"
|
||||||
|
keyring_id = stackit_kms_keyring.keyring001.keyring_id
|
||||||
|
project_id = var.project_id
|
||||||
|
protection = "software"
|
||||||
|
purpose = "symmetric_encrypt_decrypt"
|
||||||
|
}
|
||||||
|
|
||||||
|
# stackit_kms_keyring.keyring001 will be destroyed
|
||||||
|
# (because stackit_kms_keyring.keyring001 is not in configuration)
|
||||||
|
resource "stackit_kms_keyring" "keyring001" {
|
||||||
|
description = "This is a test keyring for private endpoints"
|
||||||
|
display_name = "msh-keyring-sna01"
|
||||||
|
project_id = var.project_id
|
||||||
|
}
|
||||||
96
sample/alpha-from-registry/postresql.tf
Normal file
96
sample/alpha-from-registry/postresql.tf
Normal file
|
|
@ -0,0 +1,96 @@
|
||||||
|
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "pgsql_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-alpha-sna-enc" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "msh-alpha-sna-enc"
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 45
|
||||||
|
flavor_id = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = stackit_kms_key.mshalpha-key01.key_id
|
||||||
|
kek_key_ring_id = stackit_kms_keyring.mshalpha-keyring.keyring_id
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = var.sa_email
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19", "170.85.2.177/32"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-alpha-nosna-noenc" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "msh-alpha-nosna-enc"
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 45
|
||||||
|
flavor_id = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19", "170.85.2.177/32"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 16
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-alpha-sna-enc.instance_id
|
||||||
|
name = var.db_admin_username
|
||||||
|
roles = ["createdb", "login"]
|
||||||
|
# roles = ["createdb", "login", "createrole"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbuser" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-alpha-sna-enc.instance_id
|
||||||
|
name = var.db_username
|
||||||
|
roles = ["login"]
|
||||||
|
# roles = ["createdb", "login", "createrole"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
count = 5
|
||||||
|
depends_on = [stackitprivatepreview_postgresflexalpha_user.ptlsdbadminuser]
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-alpha-sna-enc.instance_id
|
||||||
|
name = "${var.db_name}${count.index}"
|
||||||
|
owner = var.db_admin_username
|
||||||
|
}
|
||||||
|
|
||||||
|
# data "stackitprivatepreview_postgresflexalpha_instance" "datapsql" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# instance_id = var.instance_id
|
||||||
|
# region = "eu01"
|
||||||
|
# }
|
||||||
|
|
||||||
|
# output "psql_instance_id" {
|
||||||
|
# value = data.stackitprivatepreview_postgresflexalpha_instance.datapsql.instance_id
|
||||||
|
# }
|
||||||
|
|
||||||
|
output "psql_user_password" {
|
||||||
|
value = stackitprivatepreview_postgresflexalpha_user.ptlsdbuser.password
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
|
# output "psql_user_conn" {
|
||||||
|
# value = stackitprivatepreview_postgresflexalpha_user.ptlsdbuser.connection_string
|
||||||
|
# sensitive = true
|
||||||
|
# }
|
||||||
24
sample/alpha-from-registry/providers.tf
Normal file
24
sample/alpha-from-registry/providers.tf
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
stackit = {
|
||||||
|
source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
version = "~> 0.70"
|
||||||
|
}
|
||||||
|
stackitprivatepreview = {
|
||||||
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
version = ">=0.1.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "stackit" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
10
sample/config.tfrc.example
Normal file
10
sample/config.tfrc.example
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
provider_installation {
|
||||||
|
dev_overrides {
|
||||||
|
"registry.terraform.io/mhenselin/stackitprivatepreview" = "<CURRENT PROJECT PATH>/bin/"
|
||||||
|
}
|
||||||
|
|
||||||
|
# For all other providers, install them directly from their origin provider
|
||||||
|
# registries as normal. If you omit this, Terraform will _only_ use
|
||||||
|
# the dev_overrides block, and so no other providers will be available.
|
||||||
|
direct {}
|
||||||
|
}
|
||||||
57
sample/kms/kms.tf
Normal file
57
sample/kms/kms.tf
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
resource "stackit_kms_keyring" "keyring001" {
|
||||||
|
project_id = var.project_id
|
||||||
|
display_name = "msh-keyring-sna01"
|
||||||
|
description = "This is a test keyring for private endpoints"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackit_kms_key" "key001" {
|
||||||
|
project_id = var.project_id
|
||||||
|
keyring_id = stackit_kms_keyring.keyring001.keyring_id
|
||||||
|
display_name = "msh-key-sna01"
|
||||||
|
protection = "software"
|
||||||
|
algorithm = "aes_256_gcm"
|
||||||
|
purpose = "symmetric_encrypt_decrypt"
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# data "stackitprivatepreview_sqlserverflexalpha_instance" "test" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# instance_id = var.instance_id
|
||||||
|
# region = "eu01"
|
||||||
|
# }
|
||||||
|
|
||||||
|
output "key_ring_id" {
|
||||||
|
value = stackit_kms_keyring.keyring001.id
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackit_kms_keyring" "keyring001yy" {
|
||||||
|
project_id = var.project_id
|
||||||
|
display_name = "msh-kr-sna01"
|
||||||
|
description = "This is a test keyring for private endpoints"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackit_kms_key" "key001yy" {
|
||||||
|
project_id = var.project_id
|
||||||
|
keyring_id = stackit_kms_keyring.keyring001yy.keyring_id
|
||||||
|
display_name = "msh-k-001"
|
||||||
|
protection = "software"
|
||||||
|
algorithm = "aes_256_gcm"
|
||||||
|
purpose = "symmetric_encrypt_decrypt"
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# data "stackitprivatepreview_sqlserverflexalpha_instance" "test" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# instance_id = var.instance_id
|
||||||
|
# region = "eu01"
|
||||||
|
# }
|
||||||
|
|
||||||
|
output "key_ring_idxx" {
|
||||||
|
value = stackit_kms_keyring.keyring001yy.id
|
||||||
|
}
|
||||||
|
|
||||||
|
output "key_id" {
|
||||||
|
value = stackit_kms_key.key001yy.id
|
||||||
|
}
|
||||||
25
sample/kms/providers.tf
Normal file
25
sample/kms/providers.tf
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
stackit = {
|
||||||
|
source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
version = "~> 0.70"
|
||||||
|
}
|
||||||
|
# stackitprivatepreview = {
|
||||||
|
# source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
# version = "= 0.0.2-alpha"
|
||||||
|
# }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "stackit" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# provider "stackitprivatepreview" {
|
||||||
|
# default_region = "eu01"
|
||||||
|
# enable_beta_resources = true
|
||||||
|
# service_account_key_path = "../service_account.json"
|
||||||
|
# }
|
||||||
4
sample/pg_import/outputs.tf
Normal file
4
sample/pg_import/outputs.tf
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
#
|
||||||
|
# output "postgres_flavor" {
|
||||||
|
# value = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
# }
|
||||||
45
sample/pg_import/postresql.tf
Normal file
45
sample/pg_import/postresql.tf
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "pgsql_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "import_for_deletion" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "mshpetest2"
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 45
|
||||||
|
flavor_id = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
# class = "premium-perf2-stackit"
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
# key_id = stackit_kms_key.key.key_id
|
||||||
|
# keyring_id = stackit_kms_keyring.keyring.keyring_id
|
||||||
|
kek_key_id = var.key_id
|
||||||
|
kek_key_ring_id = var.keyring_id
|
||||||
|
kek_key_version = var.key_version
|
||||||
|
service_account = var.sa_email
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19", "170.85.2.177/32"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 14
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import_for_deletion
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
instance_id = "d52b5d4c-be3f-4c14-a107-330dab99fd2e"
|
||||||
|
}
|
||||||
|
}
|
||||||
25
sample/pg_import/providers.tf
Normal file
25
sample/pg_import/providers.tf
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
# stackit = {
|
||||||
|
# source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
# version = "~> 0.70"
|
||||||
|
# }
|
||||||
|
stackitprivatepreview = {
|
||||||
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
version = "> 0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# provider "stackit" {
|
||||||
|
# default_region = "eu01"
|
||||||
|
# enable_beta_resources = true
|
||||||
|
# service_account_key_path = "./service_account.json"
|
||||||
|
# }
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
11
sample/pg_import/variables.tf.example
Normal file
11
sample/pg_import/variables.tf.example
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
variable "project_id" {
|
||||||
|
default = "<PROJECT ID UUID>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "sa_email" {
|
||||||
|
default = "<SERVICE ACCOUNT EMAIL>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "db_username" {
|
||||||
|
default = "<DB USERNAME>"
|
||||||
|
}
|
||||||
0
sample/pg_instance/outputs.tf
Normal file
0
sample/pg_instance/outputs.tf
Normal file
17
sample/pg_instance/postresql.tf
Normal file
17
sample/pg_instance/postresql.tf
Normal file
|
|
@ -0,0 +1,17 @@
|
||||||
|
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "pgsql_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "pgsql_flavor2"{
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
25
sample/pg_instance/providers.tf
Normal file
25
sample/pg_instance/providers.tf
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
# stackit = {
|
||||||
|
# source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
# version = "~> 0.70"
|
||||||
|
# }
|
||||||
|
stackitprivatepreview = {
|
||||||
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
version = "> 0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# provider "stackit" {
|
||||||
|
# default_region = "eu01"
|
||||||
|
# enable_beta_resources = true
|
||||||
|
# service_account_key_path = "./service_account.json"
|
||||||
|
# }
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "/home/henselinm/Development/PTLS/terraform-provider-stackit-MSH/sample/pg_instance/service_account.json"
|
||||||
|
}
|
||||||
11
sample/pg_instance/variables.tf.example
Normal file
11
sample/pg_instance/variables.tf.example
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
variable "project_id" {
|
||||||
|
default = "<PROJECT ID UUID>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "sa_email" {
|
||||||
|
default = "<SERVICE ACCOUNT EMAIL>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "db_username" {
|
||||||
|
default = "<DB USERNAME>"
|
||||||
|
}
|
||||||
4
sample/postgres/outputs.tf
Normal file
4
sample/postgres/outputs.tf
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
|
||||||
|
output "postgres_flavor" {
|
||||||
|
value = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
}
|
||||||
125
sample/postgres/postresql.tf
Normal file
125
sample/postgres/postresql.tf
Normal file
|
|
@ -0,0 +1,125 @@
|
||||||
|
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "pgsql_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-sna-pe-example" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "mshpetest2"
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 45
|
||||||
|
# flavor_id = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
flavor = {
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
}
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
# class = "premium-perf2-stackit"
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
# key_id = stackit_kms_key.key.key_id
|
||||||
|
# keyring_id = stackit_kms_keyring.keyring.keyring_id
|
||||||
|
kek_key_id = var.key_id
|
||||||
|
kek_key_ring_id = var.keyring_id
|
||||||
|
kek_key_version = var.key_version
|
||||||
|
service_account = var.sa_email
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19", "170.85.2.177/32"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-sna-pe-example2" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "mshpetest2-1"
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 45
|
||||||
|
flavor_id = data.stackitprivatepreview_postgresflexalpha_flavor.pgsql_flavor.flavor_id
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
# class = "premium-perf2-stackit"
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
# key_id = stackit_kms_key.key.key_id
|
||||||
|
# keyring_id = stackit_kms_keyring.keyring.keyring_id
|
||||||
|
kek_key_id = var.key_id
|
||||||
|
kek_key_ring_id = var.keyring_id
|
||||||
|
kek_key_version = var.key_version
|
||||||
|
service_account = var.sa_email
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19", "170.85.2.177/32"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
version = 16
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
|
||||||
|
name = var.db_admin_username
|
||||||
|
roles = ["createdb", "login"]
|
||||||
|
# roles = ["createdb", "login", "login"]
|
||||||
|
# roles = ["createdb", "login", "createrole"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbadminuser2" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example2.instance_id
|
||||||
|
name = var.db_admin_username
|
||||||
|
roles = ["createdb", "login"]
|
||||||
|
# roles = ["createdb", "login", "createrole"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "ptlsdbuser" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
|
||||||
|
name = var.db_name
|
||||||
|
roles = ["login"]
|
||||||
|
# roles = ["createdb", "login", "createrole"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
count = 5
|
||||||
|
depends_on = [stackitprivatepreview_postgresflexalpha_user.ptlsdbadminuser]
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.instance_id
|
||||||
|
name = "${var.db_name}${count.index}"
|
||||||
|
owner = var.db_admin_username
|
||||||
|
}
|
||||||
|
|
||||||
|
# data "stackitprivatepreview_postgresflexalpha_instance" "datapsql" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# instance_id = var.instance_id
|
||||||
|
# region = "eu01"
|
||||||
|
# }
|
||||||
|
|
||||||
|
# output "psql_instance_id" {
|
||||||
|
# value = data.stackitprivatepreview_postgresflexalpha_instance.datapsql.instance_id
|
||||||
|
# }
|
||||||
|
|
||||||
|
output "psql_user_password" {
|
||||||
|
value = stackitprivatepreview_postgresflexalpha_user.ptlsdbuser.password
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
|
# output "psql_user_conn" {
|
||||||
|
# value = stackitprivatepreview_postgresflexalpha_user.ptlsdbuser.connection.host
|
||||||
|
# sensitive = true
|
||||||
|
# }
|
||||||
|
|
||||||
|
output "determined_flavor_id" {
|
||||||
|
value = stackitprivatepreview_postgresflexalpha_instance.msh-sna-pe-example.flavor_id
|
||||||
|
}
|
||||||
25
sample/postgres/providers.tf
Normal file
25
sample/postgres/providers.tf
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
# stackit = {
|
||||||
|
# source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
# version = "~> 0.70"
|
||||||
|
# }
|
||||||
|
stackitprivatepreview = {
|
||||||
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
version = "> 0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# provider "stackit" {
|
||||||
|
# default_region = "eu01"
|
||||||
|
# enable_beta_resources = true
|
||||||
|
# service_account_key_path = "./service_account.json"
|
||||||
|
# }
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
11
sample/postgres/variables.tf.example
Normal file
11
sample/postgres/variables.tf.example
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
variable "project_id" {
|
||||||
|
default = "<PROJECT ID UUID>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "sa_email" {
|
||||||
|
default = "<SERVICE ACCOUNT EMAIL>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "db_username" {
|
||||||
|
default = "<DB USERNAME>"
|
||||||
|
}
|
||||||
13
sample/sqlserver/flavor.tf
Normal file
13
sample/sqlserver/flavor.tf
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
output "sqlserver_flavor" {
|
||||||
|
value = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id
|
||||||
|
}
|
||||||
25
sample/sqlserver/providers.tf
Normal file
25
sample/sqlserver/providers.tf
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
# stackit = {
|
||||||
|
# source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
# version = "~> 0.70"
|
||||||
|
# }
|
||||||
|
stackitprivatepreview = {
|
||||||
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
version = "> 0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# provider "stackit" {
|
||||||
|
# default_region = "eu01"
|
||||||
|
# enable_beta_resources = true
|
||||||
|
# service_account_key_path = "../service_account.json"
|
||||||
|
# }
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
63
sample/sqlserver/sqlserver.tf
Normal file
63
sample/sqlserver/sqlserver.tf
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
# resource "stackit_kms_keyring" "keyring" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# display_name = "msh-keyring01"
|
||||||
|
# description = "This is a test keyring for private endpoints"
|
||||||
|
# }
|
||||||
|
#
|
||||||
|
# resource "stackit_kms_key" "key" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# keyring_id = stackit_kms_keyring.keyring.keyring_id
|
||||||
|
# display_name = "msh-key01"
|
||||||
|
# protection = "software"
|
||||||
|
# algorithm = "aes_256_gcm"
|
||||||
|
# purpose = "symmetric_encrypt_decrypt"
|
||||||
|
# access_scope = "SNA"
|
||||||
|
# }
|
||||||
|
#
|
||||||
|
# output "keyid" {
|
||||||
|
# value = stackit_kms_key.key.key_id
|
||||||
|
# }
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "msh-beta-sna-001"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
encryption = {
|
||||||
|
#key_id = stackit_kms_key.key.key_id
|
||||||
|
#keyring_id = stackit_kms_keyring.keyring.keyring_id
|
||||||
|
#key_version = 1
|
||||||
|
# key with scope public
|
||||||
|
# kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
|
||||||
|
kek_key_id = "c6878f92-ce55-4b79-8236-ba9d001d7967" # msh-k-001
|
||||||
|
# key_id = var.key_id
|
||||||
|
# kek_key_ring_id = var.keyring_id
|
||||||
|
kek_key_ring_id = "0dea3f5f-9947-4dda-a9d3-18418832cefe" # msh-kr-sna01
|
||||||
|
kek_key_version = var.key_version
|
||||||
|
service_account = var.sa_email
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "betauser" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id
|
||||||
|
username = "betauser"
|
||||||
|
roles = ["##STACKIT_DatabaseManager##", "##STACKIT_LoginManager##"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_database" "betadb" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-sna-001.instance_id
|
||||||
|
name = "mshtest002"
|
||||||
|
owner = stackitprivatepreview_sqlserverflexbeta_user.betauser.username
|
||||||
|
}
|
||||||
11
sample/sqlserver/variables.tf.example
Normal file
11
sample/sqlserver/variables.tf.example
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
variable "project_id" {
|
||||||
|
default = "<PROJECT ID UUID>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "sa_email" {
|
||||||
|
default = "<SERVICE ACCOUNT EMAIL>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "db_username" {
|
||||||
|
default = "<DB USERNAME>"
|
||||||
|
}
|
||||||
13
sample/sqlserver_beta/flavor.tf
Normal file
13
sample/sqlserver_beta/flavor.tf
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
#
|
||||||
|
# data "stackitprivatepreview_sqlserverflexalpha_flavor" "sqlserver_flavor" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# region = "eu01"
|
||||||
|
# cpu = 4
|
||||||
|
# ram = 16
|
||||||
|
# node_type = "Single"
|
||||||
|
# storage_class = "premium-perf2-stackit"
|
||||||
|
# }
|
||||||
|
#
|
||||||
|
# output "sqlserver_flavor" {
|
||||||
|
# value = data.stackitprivatepreview_sqlserverflexalpha_flavor.sqlserver_flavor.flavor_id
|
||||||
|
# }
|
||||||
9
sample/sqlserver_beta/postgres.tf
Normal file
9
sample/sqlserver_beta/postgres.tf
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "pgsql_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 2
|
||||||
|
ram = 4
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
25
sample/sqlserver_beta/providers.tf
Normal file
25
sample/sqlserver_beta/providers.tf
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
|
||||||
|
terraform {
|
||||||
|
required_providers {
|
||||||
|
# stackit = {
|
||||||
|
# source = "registry.terraform.io/stackitcloud/stackit"
|
||||||
|
# version = "~> 0.70"
|
||||||
|
# }
|
||||||
|
stackitprivatepreview = {
|
||||||
|
source = "tfregistry.sysops.stackit.rocks/mhenselin/stackitprivatepreview"
|
||||||
|
version = "> 0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# provider "stackit" {
|
||||||
|
# default_region = "eu01"
|
||||||
|
# enable_beta_resources = true
|
||||||
|
# service_account_key_path = "../service_account.json"
|
||||||
|
# }
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
enable_beta_resources = true
|
||||||
|
service_account_key_path = "../service_account.json"
|
||||||
|
}
|
||||||
116
sample/sqlserver_beta/sqlserver.tf
Normal file
116
sample/sqlserver_beta/sqlserver.tf
Normal file
|
|
@ -0,0 +1,116 @@
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_flavor" "sqlserver_flavor_2" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = "eu01"
|
||||||
|
cpu = 4
|
||||||
|
ram = 32
|
||||||
|
node_type = "Replica"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-nosna-001" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "msh-beta-nosna-001-renamed"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "msh-beta-sna-001" {
|
||||||
|
project_id = var.project_id
|
||||||
|
name = "msh-beta-sna-001"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = data.stackitprivatepreview_sqlserverflexbeta_flavor.sqlserver_flavor.flavor_id
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 5
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
encryption = {
|
||||||
|
#key_id = stackit_kms_key.key.key_id
|
||||||
|
#keyring_id = stackit_kms_keyring.keyring.keyring_id
|
||||||
|
#key_version = 1
|
||||||
|
# key with scope public
|
||||||
|
kek_key_id = "fe039bcf-8d7b-431a-801d-9e81371a6b7b"
|
||||||
|
# key_id = var.key_id
|
||||||
|
kek_key_ring_id = var.keyring_id
|
||||||
|
kek_key_version = var.key_version
|
||||||
|
service_account = var.sa_email
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["0.0.0.0/0", "193.148.160.0/19"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "exampleuseruno" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-nosna-001.instance_id
|
||||||
|
username = "exampleuserdue"
|
||||||
|
roles = ["##STACKIT_ProcessManager##", "##STACKIT_LoginManager##", "##STACKIT_ServerManager##"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "exampleuser" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-nosna-001.instance_id
|
||||||
|
username = "exampleuser"
|
||||||
|
roles = ["##STACKIT_LoginManager##"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_database" "mshtest002" {
|
||||||
|
project_id = var.project_id
|
||||||
|
instance_id = stackitprivatepreview_sqlserverflexbeta_instance.msh-beta-nosna-001.instance_id
|
||||||
|
name = "mshtest002"
|
||||||
|
# owner = "dbuser"
|
||||||
|
owner = stackitprivatepreview_sqlserverflexbeta_user.exampleuseruno.username
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# region = "eu01"
|
||||||
|
# instance_id = "b3b63d0c-35bf-4804-84ea-5abec2a8ae58"
|
||||||
|
# database_name = "mshtest001"
|
||||||
|
# }
|
||||||
|
|
||||||
|
# output "dbdetails" {
|
||||||
|
# value = data.stackitprivatepreview_sqlserverflexbeta_database.example
|
||||||
|
# }
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
# resource "stackitprivatepreview_sqlserverflexbeta_database" "mshtest" {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# instance_id = "b3b63d0c-35bf-4804-84ea-5abec2a8ae58"
|
||||||
|
# name = "mshtest"
|
||||||
|
# owner = "dbuser"
|
||||||
|
# }
|
||||||
|
#
|
||||||
|
# import {
|
||||||
|
# to = stackitprivatepreview_sqlserverflexbeta_database.mshtest
|
||||||
|
# identity = {
|
||||||
|
# project_id = var.project_id
|
||||||
|
# region = "eu01"
|
||||||
|
# instance_id = "b3b63d0c-35bf-4804-84ea-5abec2a8ae58"
|
||||||
|
# database_name = "mshtest"
|
||||||
|
# }
|
||||||
|
# }
|
||||||
11
sample/sqlserver_beta/variables.tf.example
Normal file
11
sample/sqlserver_beta/variables.tf.example
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
variable "project_id" {
|
||||||
|
default = "<PROJECT ID UUID>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "sa_email" {
|
||||||
|
default = "<SERVICE ACCOUNT EMAIL>"
|
||||||
|
}
|
||||||
|
|
||||||
|
variable "db_username" {
|
||||||
|
default = "<DB USERNAME>"
|
||||||
|
}
|
||||||
52
sample/tf.sh
Executable file
52
sample/tf.sh
Executable file
|
|
@ -0,0 +1,52 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# ./tf.sh apply > >(tee -a stdout.log) 2> >(tee -a stderr.log >&2)
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
echo "$0 usage:" && grep "[[:space:]].)\ #" "$0" | sed 's/#//' | sed -r 's/([a-z])\)/-\1/';
|
||||||
|
exit 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
[ $# -eq 0 ] && usage
|
||||||
|
|
||||||
|
CONFIG_FOLDER=$(dirname "$0")
|
||||||
|
# BINARY=terraform
|
||||||
|
BINARY=tofu
|
||||||
|
|
||||||
|
ADD=""
|
||||||
|
|
||||||
|
while getopts ":b:hdirt" arg; do
|
||||||
|
case $arg in
|
||||||
|
b) # Set binary (default is terraform).
|
||||||
|
BINARY=${OPTARG}
|
||||||
|
shift 2
|
||||||
|
;;
|
||||||
|
d) # Set log level to DEBUG.
|
||||||
|
TF_LOG=DEBUG
|
||||||
|
export TF_LOG
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
i) # Set log level to INFO.
|
||||||
|
TF_LOG=INFO
|
||||||
|
export TF_LOG
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
r) # Set log level to INFO.
|
||||||
|
ADD="-refresh-only"
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
t) # Set log level to TRACE.
|
||||||
|
TF_LOG=TRACE
|
||||||
|
export TF_LOG
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
h | *) # Display help.
|
||||||
|
usage
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
TERRAFORM_CONFIG=${CONFIG_FOLDER}/config.tfrc
|
||||||
|
export TERRAFORM_CONFIG
|
||||||
|
|
||||||
|
${BINARY} "$@" ${ADD}
|
||||||
|
|
@ -2,7 +2,7 @@ provider:
|
||||||
name: stackitprivatepreview
|
name: stackitprivatepreview
|
||||||
|
|
||||||
data_sources:
|
data_sources:
|
||||||
collation:
|
collations:
|
||||||
read:
|
read:
|
||||||
path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations
|
path: /v3beta1/projects/{projectId}/regions/{region}/instances/{instanceId}/collations
|
||||||
method: GET
|
method: GET
|
||||||
|
|
@ -3,7 +3,7 @@ provider:
|
||||||
name: stackitprivatepreview
|
name: stackitprivatepreview
|
||||||
|
|
||||||
data_sources:
|
data_sources:
|
||||||
version:
|
versions:
|
||||||
read:
|
read:
|
||||||
path: /v3beta1/projects/{projectId}/regions/{region}/versions
|
path: /v3beta1/projects/{projectId}/regions/{region}/versions
|
||||||
method: GET
|
method: GET
|
||||||
|
|
|
||||||
|
|
@ -45,7 +45,7 @@ func DatabaseDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The STACKIT project ID.",
|
MarkdownDescription: "The STACKIT project ID.",
|
||||||
},
|
},
|
||||||
"region": schema.StringAttribute{
|
"region": schema.StringAttribute{
|
||||||
Required: true,
|
Optional: true,
|
||||||
Description: "The region which should be addressed",
|
Description: "The region which should be addressed",
|
||||||
MarkdownDescription: "The region which should be addressed",
|
MarkdownDescription: "The region which should be addressed",
|
||||||
Validators: []validator.String{
|
Validators: []validator.String{
|
||||||
|
|
|
||||||
|
|
@ -64,17 +64,21 @@ func mapResourceFields(source *v3alpha1api.GetDatabaseResponse, model *resourceM
|
||||||
return fmt.Errorf("model input is nil")
|
return fmt.Errorf("model input is nil")
|
||||||
}
|
}
|
||||||
|
|
||||||
var databaseId int64
|
var databaseID int64
|
||||||
if model.Id.ValueInt64() != 0 {
|
if model.DatabaseId.ValueInt64() != 0 {
|
||||||
databaseId = model.Id.ValueInt64()
|
if source.Id != 0 {
|
||||||
|
if model.DatabaseId.ValueInt64() != int64(source.Id) {
|
||||||
|
return fmt.Errorf("retrieved ID does not match known ID")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
databaseID = model.DatabaseId.ValueInt64()
|
||||||
} else if source.Id != 0 {
|
} else if source.Id != 0 {
|
||||||
databaseId = int64(source.Id)
|
databaseID = int64(source.Id)
|
||||||
} else {
|
} else {
|
||||||
return fmt.Errorf("database id not present")
|
return fmt.Errorf("database id not present")
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(databaseId)
|
model.DatabaseId = types.Int64Value(databaseID)
|
||||||
model.DatabaseId = types.Int64Value(databaseId)
|
|
||||||
model.Name = types.StringValue(source.GetName())
|
model.Name = types.StringValue(source.GetName())
|
||||||
model.Owner = types.StringValue(cleanString(source.Owner))
|
model.Owner = types.StringValue(cleanString(source.Owner))
|
||||||
return nil
|
return nil
|
||||||
|
|
|
||||||
|
|
@ -160,7 +160,7 @@ func TestMapResourceFields(t *testing.T) {
|
||||||
},
|
},
|
||||||
expected: expected{
|
expected: expected{
|
||||||
model: &resourceModel{
|
model: &resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringNull(),
|
||||||
Name: types.StringValue("my-db"),
|
Name: types.StringValue("my-db"),
|
||||||
Owner: types.StringValue("my-owner"),
|
Owner: types.StringValue("my-owner"),
|
||||||
DatabaseId: types.Int64Value(1),
|
DatabaseId: types.Int64Value(1),
|
||||||
|
|
|
||||||
|
|
@ -11,9 +11,9 @@ import (
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/path"
|
"github.com/hashicorp/terraform-plugin-framework/path"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
|
|
@ -30,11 +30,6 @@ var (
|
||||||
_ resource.ResourceWithConfigure = &databaseResource{}
|
_ resource.ResourceWithConfigure = &databaseResource{}
|
||||||
_ resource.ResourceWithImportState = &databaseResource{}
|
_ resource.ResourceWithImportState = &databaseResource{}
|
||||||
_ resource.ResourceWithModifyPlan = &databaseResource{}
|
_ resource.ResourceWithModifyPlan = &databaseResource{}
|
||||||
_ resource.ResourceWithIdentity = &databaseResource{}
|
|
||||||
|
|
||||||
// Error message constants
|
|
||||||
extractErrorSummary = "extracting failed"
|
|
||||||
extractErrorMessage = "Extracting identity data: %v"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// NewDatabaseResource is a helper function to simplify the provider implementation.
|
// NewDatabaseResource is a helper function to simplify the provider implementation.
|
||||||
|
|
@ -45,14 +40,6 @@ func NewDatabaseResource() resource.Resource {
|
||||||
// resourceModel describes the resource data model.
|
// resourceModel describes the resource data model.
|
||||||
type resourceModel = postgresflexalphaResGen.DatabaseModel
|
type resourceModel = postgresflexalphaResGen.DatabaseModel
|
||||||
|
|
||||||
// DatabaseResourceIdentityModel describes the resource's identity attributes.
|
|
||||||
type DatabaseResourceIdentityModel struct {
|
|
||||||
ProjectID types.String `tfsdk:"project_id"`
|
|
||||||
Region types.String `tfsdk:"region"`
|
|
||||||
InstanceID types.String `tfsdk:"instance_id"`
|
|
||||||
DatabaseID types.Int64 `tfsdk:"database_id"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// databaseResource is the resource implementation.
|
// databaseResource is the resource implementation.
|
||||||
type databaseResource struct {
|
type databaseResource struct {
|
||||||
client *v3alpha1api.APIClient
|
client *v3alpha1api.APIClient
|
||||||
|
|
@ -138,30 +125,6 @@ func (r *databaseResource) Schema(ctx context.Context, _ resource.SchemaRequest,
|
||||||
resp.Schema = s
|
resp.Schema = s
|
||||||
}
|
}
|
||||||
|
|
||||||
// IdentitySchema defines the schema for the resource's identity attributes.
|
|
||||||
func (r *databaseResource) IdentitySchema(
|
|
||||||
_ context.Context,
|
|
||||||
_ resource.IdentitySchemaRequest,
|
|
||||||
response *resource.IdentitySchemaResponse,
|
|
||||||
) {
|
|
||||||
response.IdentitySchema = identityschema.Schema{
|
|
||||||
Attributes: map[string]identityschema.Attribute{
|
|
||||||
"project_id": identityschema.StringAttribute{
|
|
||||||
RequiredForImport: true,
|
|
||||||
},
|
|
||||||
"region": identityschema.StringAttribute{
|
|
||||||
RequiredForImport: true,
|
|
||||||
},
|
|
||||||
"instance_id": identityschema.StringAttribute{
|
|
||||||
RequiredForImport: true,
|
|
||||||
},
|
|
||||||
"database_id": identityschema.Int64Attribute{
|
|
||||||
RequiredForImport: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create creates the resource and sets the initial Terraform state.
|
// Create creates the resource and sets the initial Terraform state.
|
||||||
func (r *databaseResource) Create(
|
func (r *databaseResource) Create(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
|
@ -178,12 +141,12 @@ func (r *databaseResource) Create(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId := model.ProjectId.ValueString()
|
projectID := model.ProjectId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceID := model.InstanceId.ValueString()
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
// Generate API request body from model
|
// Generate API request body from model
|
||||||
|
|
@ -200,9 +163,9 @@ func (r *databaseResource) Create(
|
||||||
// Create new database
|
// Create new database
|
||||||
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
|
databaseResp, err := r.client.DefaultAPI.CreateDatabaseRequest(
|
||||||
ctx,
|
ctx,
|
||||||
projectId,
|
projectID,
|
||||||
region,
|
region,
|
||||||
instanceId,
|
instanceID,
|
||||||
).CreateDatabaseRequestPayload(*payload).Execute()
|
).CreateDatabaseRequestPayload(*payload).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, funcErrorSummary, fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, funcErrorSummary, fmt.Sprintf("Calling API: %v", err))
|
||||||
|
|
@ -219,25 +182,35 @@ func (r *databaseResource) Create(
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
databaseId := int64(*dbID)
|
databaseID := int64(*dbID)
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
databaseIDString := strconv.Itoa(int(*dbID))
|
||||||
|
|
||||||
|
ctx = tflog.SetField(ctx, "database_id", databaseID)
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
// Save identity into Terraform state
|
model.DatabaseId = types.Int64Value(databaseID)
|
||||||
identity := DatabaseResourceIdentityModel{
|
model.Id = utils.BuildInternalTerraformId(projectID, region, instanceID, databaseIDString)
|
||||||
ProjectID: types.StringValue(projectId),
|
|
||||||
Region: types.StringValue(region),
|
|
||||||
InstanceID: types.StringValue(instanceId),
|
|
||||||
DatabaseID: types.Int64Value(databaseId),
|
|
||||||
}
|
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
|
||||||
if resp.Diagnostics.HasError() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
// Set data returned by API in id
|
||||||
SetTimeout(15 * time.Minute).
|
resp.Diagnostics.Append(
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
resp.State.SetAttribute(
|
||||||
|
ctx,
|
||||||
|
path.Root("database_id"),
|
||||||
|
databaseID,
|
||||||
|
)...,
|
||||||
|
)
|
||||||
|
// Set data returned by API in id
|
||||||
|
resp.Diagnostics.Append(
|
||||||
|
resp.State.SetAttribute(
|
||||||
|
ctx,
|
||||||
|
path.Root("id"),
|
||||||
|
model.Id,
|
||||||
|
)...,
|
||||||
|
)
|
||||||
|
|
||||||
|
database, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectID, instanceID, region, databaseID).
|
||||||
|
SetTimeout(30 * time.Minute).
|
||||||
|
SetSleepBeforeWait(10 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
|
|
@ -284,19 +257,30 @@ func (r *databaseResource) Read(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId := model.ProjectId.ValueString()
|
projectID := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceID := model.InstanceId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
databaseId := model.DatabaseId.ValueInt64()
|
databaseID := model.DatabaseId.ValueInt64()
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
databaseIDString := strconv.Itoa(int(databaseID))
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
|
||||||
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseID)
|
||||||
|
|
||||||
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
// Set data returned by API in id
|
||||||
SetTimeout(15 * time.Minute).
|
resp.Diagnostics.Append(
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
resp.State.SetAttribute(
|
||||||
|
ctx,
|
||||||
|
path.Root("id"),
|
||||||
|
utils.BuildInternalTerraformId(projectID, region, instanceID, databaseIDString),
|
||||||
|
)...,
|
||||||
|
)
|
||||||
|
|
||||||
|
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectID, instanceID, region, databaseID).
|
||||||
|
SetTimeout(30 * time.Minute).
|
||||||
|
SetSleepBeforeWait(10 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
|
|
@ -322,18 +306,6 @@ func (r *databaseResource) Read(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save identity into Terraform state
|
|
||||||
identity := DatabaseResourceIdentityModel{
|
|
||||||
ProjectID: types.StringValue(projectId),
|
|
||||||
Region: types.StringValue(region),
|
|
||||||
InstanceID: types.StringValue(instanceId),
|
|
||||||
DatabaseID: types.Int64Value(int64(databaseResp.GetId())),
|
|
||||||
}
|
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
|
||||||
if resp.Diagnostics.HasError() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set refreshed state
|
// Set refreshed state
|
||||||
diags = resp.State.Set(ctx, model)
|
diags = resp.State.Set(ctx, model)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
|
|
@ -414,8 +386,8 @@ func (r *databaseResource) Update(
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
databaseResp, err := postgresflexalphaWait.GetDatabaseByIdWaitHandler(ctx, r.client.DefaultAPI, projectId, instanceId, region, databaseId).
|
||||||
SetTimeout(15 * time.Minute).
|
SetTimeout(30 * time.Minute).
|
||||||
SetSleepBeforeWait(15 * time.Second).
|
SetSleepBeforeWait(10 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
|
core.LogAndAddError(ctx, &resp.Diagnostics, "error updating database", err.Error())
|
||||||
|
|
@ -436,18 +408,6 @@ func (r *databaseResource) Update(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save identity into Terraform state
|
|
||||||
identity := DatabaseResourceIdentityModel{
|
|
||||||
ProjectID: types.StringValue(projectId),
|
|
||||||
Region: types.StringValue(region),
|
|
||||||
InstanceID: types.StringValue(instanceId),
|
|
||||||
DatabaseID: types.Int64Value(databaseId),
|
|
||||||
}
|
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
|
||||||
if resp.Diagnostics.HasError() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set state to fully populated data
|
// Set state to fully populated data
|
||||||
resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
|
resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -469,38 +429,33 @@ func (r *databaseResource) Delete(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read identity data
|
|
||||||
var identityData DatabaseResourceIdentityModel
|
|
||||||
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
|
|
||||||
if resp.Diagnostics.HasError() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId, region, instanceId, databaseId64, errExt := r.extractIdentityData(model, identityData)
|
projectID := model.ProjectId.ValueString()
|
||||||
if errExt != nil {
|
instanceID := model.InstanceId.ValueString()
|
||||||
core.LogAndAddError(
|
region := model.Region.ValueString()
|
||||||
ctx,
|
databaseID64 := model.DatabaseId.ValueInt64()
|
||||||
&resp.Diagnostics,
|
|
||||||
extractErrorSummary,
|
|
||||||
fmt.Sprintf(extractErrorMessage, errExt),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if databaseId64 > math.MaxInt32 {
|
if databaseID64 > math.MaxInt32 {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error in type conversion", "int value too large (databaseId)")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
databaseId := int32(databaseId64) // nolint:gosec // check is performed above
|
databaseID := int32(databaseID64) // nolint:gosec // check is performed above
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
ctx = tflog.SetField(ctx, "database_id", databaseId)
|
ctx = tflog.SetField(ctx, "database_id", databaseID)
|
||||||
|
|
||||||
// Delete existing record set
|
// Delete existing record set
|
||||||
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectId, region, instanceId, databaseId).Execute()
|
err := r.client.DefaultAPI.DeleteDatabaseRequest(ctx, projectID, region, instanceID, databaseID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) // nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
||||||
|
if ok {
|
||||||
|
if oapiErr.StatusCode == 404 {
|
||||||
|
resp.State.RemoveResource(ctx)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting database", fmt.Sprintf("Calling API: %v", err))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -517,109 +472,44 @@ func (r *databaseResource) ImportState(
|
||||||
resp *resource.ImportStateResponse,
|
resp *resource.ImportStateResponse,
|
||||||
) {
|
) {
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
idParts := strings.Split(req.ID, core.Separator)
|
||||||
|
|
||||||
if req.ID != "" {
|
if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
|
||||||
idParts := strings.Split(req.ID, core.Separator)
|
core.LogAndAddError(
|
||||||
|
ctx, &resp.Diagnostics,
|
||||||
|
"Error importing database",
|
||||||
|
fmt.Sprintf(
|
||||||
|
"Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q",
|
||||||
|
req.ID,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" || idParts[3] == "" {
|
databaseID, err := strconv.ParseInt(idParts[3], 10, 64)
|
||||||
core.LogAndAddError(
|
if err != nil {
|
||||||
ctx, &resp.Diagnostics,
|
core.LogAndAddError(
|
||||||
"Error importing database",
|
|
||||||
fmt.Sprintf(
|
|
||||||
"Expected import identifier with format [project_id],[region],[instance_id],[database_id], got %q",
|
|
||||||
req.ID,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
databaseId, err := strconv.ParseInt(idParts[3], 10, 64)
|
|
||||||
if err != nil {
|
|
||||||
core.LogAndAddError(
|
|
||||||
ctx,
|
|
||||||
&resp.Diagnostics,
|
|
||||||
"Error importing database",
|
|
||||||
fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]),
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...)
|
|
||||||
|
|
||||||
core.LogAndAddWarning(
|
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
"Postgresflex database imported with empty password",
|
"Error importing database",
|
||||||
"The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
|
fmt.Sprintf("Invalid database_id format: %q. It must be a valid integer.", idParts[3]),
|
||||||
)
|
)
|
||||||
|
|
||||||
tflog.Info(ctx, "Postgres Flex database state imported")
|
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// If no ID is provided, attempt to read identity attributes from the import configuration
|
tfIDString := utils.BuildInternalTerraformId(idParts...).ValueString()
|
||||||
var identityData DatabaseResourceIdentityModel
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), tfIDString)...)
|
||||||
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
||||||
if resp.Diagnostics.HasError() {
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
|
||||||
return
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
|
||||||
}
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseID)...)
|
||||||
|
|
||||||
projectId := identityData.ProjectID.ValueString()
|
core.LogAndAddWarning(
|
||||||
region := identityData.Region.ValueString()
|
ctx,
|
||||||
instanceId := identityData.InstanceID.ValueString()
|
&resp.Diagnostics,
|
||||||
databaseId := identityData.DatabaseID.ValueInt64()
|
"Postgresflex database imported with empty password",
|
||||||
|
"The database password is not imported as it is only available upon creation of a new database. The password field will be empty.",
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
|
)
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("database_id"), databaseId)...)
|
|
||||||
|
|
||||||
tflog.Info(ctx, "Postgres Flex database state imported")
|
tflog.Info(ctx, "Postgres Flex database state imported")
|
||||||
}
|
}
|
||||||
|
|
||||||
// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
|
|
||||||
func (r *databaseResource) extractIdentityData(
|
|
||||||
model resourceModel,
|
|
||||||
identity DatabaseResourceIdentityModel,
|
|
||||||
) (projectId, region, instanceId string, databaseId int64, err error) {
|
|
||||||
if !model.DatabaseId.IsNull() && !model.DatabaseId.IsUnknown() {
|
|
||||||
databaseId = model.DatabaseId.ValueInt64()
|
|
||||||
} else {
|
|
||||||
if identity.DatabaseID.IsNull() || identity.DatabaseID.IsUnknown() {
|
|
||||||
return "", "", "", 0, fmt.Errorf("database_id not found in config")
|
|
||||||
}
|
|
||||||
databaseId = identity.DatabaseID.ValueInt64()
|
|
||||||
}
|
|
||||||
|
|
||||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
|
||||||
projectId = model.ProjectId.ValueString()
|
|
||||||
} else {
|
|
||||||
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
|
|
||||||
return "", "", "", 0, fmt.Errorf("project_id not found in config")
|
|
||||||
}
|
|
||||||
projectId = identity.ProjectID.ValueString()
|
|
||||||
}
|
|
||||||
|
|
||||||
if !model.Region.IsNull() && !model.Region.IsUnknown() {
|
|
||||||
region = r.providerData.GetRegionWithOverride(model.Region)
|
|
||||||
} else {
|
|
||||||
if identity.Region.IsNull() || identity.Region.IsUnknown() {
|
|
||||||
return "", "", "", 0, fmt.Errorf("region not found in config")
|
|
||||||
}
|
|
||||||
region = r.providerData.GetRegionWithOverride(identity.Region)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
|
||||||
instanceId = model.InstanceId.ValueString()
|
|
||||||
} else {
|
|
||||||
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
|
|
||||||
return "", "", "", 0, fmt.Errorf("instance_id not found in config")
|
|
||||||
}
|
|
||||||
instanceId = identity.InstanceID.ValueString()
|
|
||||||
}
|
|
||||||
return projectId, region, instanceId, databaseId, nil
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -20,7 +20,7 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "The ID of the database.",
|
Description: "The ID of the database.",
|
||||||
MarkdownDescription: "The ID of the database.",
|
MarkdownDescription: "The ID of the database.",
|
||||||
},
|
},
|
||||||
"id": schema.Int64Attribute{
|
"id": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The id of the database.",
|
Description: "The id of the database.",
|
||||||
MarkdownDescription: "The id of the database.",
|
MarkdownDescription: "The id of the database.",
|
||||||
|
|
@ -65,7 +65,7 @@ func DatabaseResourceSchema(ctx context.Context) schema.Schema {
|
||||||
|
|
||||||
type DatabaseModel struct {
|
type DatabaseModel struct {
|
||||||
DatabaseId types.Int64 `tfsdk:"database_id"`
|
DatabaseId types.Int64 `tfsdk:"database_id"`
|
||||||
Id types.Int64 `tfsdk:"id"`
|
Id types.String `tfsdk:"id"`
|
||||||
InstanceId types.String `tfsdk:"instance_id"`
|
InstanceId types.String `tfsdk:"instance_id"`
|
||||||
Name types.String `tfsdk:"name"`
|
Name types.String `tfsdk:"name"`
|
||||||
Owner types.String `tfsdk:"owner"`
|
Owner types.String `tfsdk:"owner"`
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ import (
|
||||||
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavors/datasources_gen"
|
postgresflexalphaGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor/datasources_gen"
|
||||||
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
postgresflexUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/utils"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
|
|
||||||
|
|
@ -220,14 +220,24 @@ func (r *flavorDataSource) Read(ctx context.Context, req datasource.ReadRequest,
|
||||||
} else {
|
} else {
|
||||||
var scList []attr.Value
|
var scList []attr.Value
|
||||||
for _, sc := range f.StorageClasses {
|
for _, sc := range f.StorageClasses {
|
||||||
|
mIop := types.Int32Null()
|
||||||
|
if val, ok := sc.GetMaxIoPerSecOk(); ok {
|
||||||
|
mIop = types.Int32Value(*val)
|
||||||
|
}
|
||||||
|
|
||||||
|
mThrough := types.Int32Null()
|
||||||
|
if val, ok := sc.GetMaxThroughInMbOk(); ok {
|
||||||
|
mThrough = types.Int32Value(*val)
|
||||||
|
}
|
||||||
|
|
||||||
scList = append(
|
scList = append(
|
||||||
scList,
|
scList,
|
||||||
postgresflexalphaGen.NewStorageClassesValueMust(
|
postgresflexalphaGen.NewStorageClassesValueMust(
|
||||||
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
|
postgresflexalphaGen.StorageClassesValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"class": types.StringValue(sc.Class),
|
"class": types.StringValue(sc.Class),
|
||||||
"max_io_per_sec": types.Int32Value(sc.MaxIoPerSec),
|
"max_io_per_sec": mIop,
|
||||||
"max_through_in_mb": types.Int32Value(sc.MaxThroughInMb),
|
"max_through_in_mb": mThrough,
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
package postgresflexalpha
|
package postgresflexalphaflavors
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,65 @@
|
||||||
|
package postgresflexalphaflavors
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
)
|
||||||
|
|
||||||
|
type flavorsClientReader interface {
|
||||||
|
GetFlavorsRequest(
|
||||||
|
ctx context.Context,
|
||||||
|
projectId, region string,
|
||||||
|
) v3alpha1api.ApiGetFlavorsRequestRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
|
||||||
|
[]v3alpha1api.ListFlavors,
|
||||||
|
error,
|
||||||
|
) {
|
||||||
|
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
|
||||||
|
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return flavorList, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function.
|
||||||
|
// Hint: The API does not have a GetFlavors endpoint, only ListFlavors
|
||||||
|
func getFlavorsByFilter(
|
||||||
|
ctx context.Context,
|
||||||
|
client flavorsClientReader,
|
||||||
|
projectId, region string,
|
||||||
|
filter func(db v3alpha1api.ListFlavors) bool,
|
||||||
|
) ([]v3alpha1api.ListFlavors, error) {
|
||||||
|
if projectId == "" || region == "" {
|
||||||
|
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
|
||||||
|
}
|
||||||
|
|
||||||
|
const pageSize = 25
|
||||||
|
|
||||||
|
var result = make([]v3alpha1api.ListFlavors, 0)
|
||||||
|
|
||||||
|
for page := int32(1); ; page++ {
|
||||||
|
res, err := client.GetFlavorsRequest(ctx, projectId, region).
|
||||||
|
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the API returns no flavors, we have reached the end of the list.
|
||||||
|
if len(res.Flavors) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, flavor := range res.Flavors {
|
||||||
|
if filter(flavor) {
|
||||||
|
result = append(result, flavor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,135 @@
|
||||||
|
package postgresflexalphaflavors
|
||||||
|
|
||||||
|
/*
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
)
|
||||||
|
|
||||||
|
type mockRequest struct {
|
||||||
|
executeFunc func() (*postgresflex.GetFlavorsResponse, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
|
||||||
|
func (m *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
|
||||||
|
func (m *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
func (m *mockRequest) Execute() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
return m.executeFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
type mockFlavorsClient struct {
|
||||||
|
executeRequest func() postgresflex.ApiGetFlavorsRequestRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return m.executeRequest()
|
||||||
|
}
|
||||||
|
|
||||||
|
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
if page == 1 {
|
||||||
|
return &postgresflex.GetFlavorsResponse{
|
||||||
|
Flavors: []postgresflex.ListFlavors{
|
||||||
|
{Id: "flavor-1", Description: "first"},
|
||||||
|
{Id: "flavor-2", Description: "second"},
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
if page == 2 {
|
||||||
|
return &postgresflex.GetFlavorsResponse{
|
||||||
|
Flavors: []postgresflex.ListFlavors{
|
||||||
|
{Id: "flavor-3", Description: "three"},
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &postgresflex.GetFlavorsResponse{
|
||||||
|
Flavors: []postgresflex.ListFlavors{},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
description string
|
||||||
|
projectId string
|
||||||
|
region string
|
||||||
|
mockErr error
|
||||||
|
filter func(postgresflex.ListFlavors) bool
|
||||||
|
wantCount int
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
description: "Success - Get all flavors (2 pages)",
|
||||||
|
projectId: "pid", region: "reg",
|
||||||
|
filter: func(_ postgresflex.ListFlavors) bool { return true },
|
||||||
|
wantCount: 3,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "Success - Filter flavors by description",
|
||||||
|
projectId: "pid", region: "reg",
|
||||||
|
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
|
||||||
|
wantCount: 1,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "Error - Missing parameters",
|
||||||
|
projectId: "", region: "reg",
|
||||||
|
wantErr: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(
|
||||||
|
tt.description, func(t *testing.T) {
|
||||||
|
var currentPage int32
|
||||||
|
client := &mockFlavorsClient{
|
||||||
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return mockRequest{
|
||||||
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
currentPage++
|
||||||
|
return mockResp(currentPage)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
|
||||||
|
|
||||||
|
if (err != nil) != tt.wantErr {
|
||||||
|
t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !tt.wantErr && len(actual) != tt.wantCount {
|
||||||
|
t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetAllFlavors(t *testing.T) {
|
||||||
|
var currentPage int32
|
||||||
|
client := &mockFlavorsClient{
|
||||||
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return mockRequest{
|
||||||
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
currentPage++
|
||||||
|
return mockResp(currentPage)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := getAllFlavors(context.Background(), client, "pid", "reg")
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("getAllFlavors() unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if len(res) != 3 {
|
||||||
|
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
@ -5,6 +5,7 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
|
|
@ -72,7 +73,13 @@ func (r *instanceDataSource) Configure(
|
||||||
|
|
||||||
// Schema defines the schema for the data source.
|
// Schema defines the schema for the data source.
|
||||||
func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
|
func (r *instanceDataSource) Schema(ctx context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
|
||||||
resp.Schema = postgresflexalpha2.InstanceDataSourceSchema(ctx)
|
sch := postgresflexalpha2.InstanceDataSourceSchema(ctx)
|
||||||
|
sch.Attributes["id"] = schema.StringAttribute{
|
||||||
|
Computed: true,
|
||||||
|
Description: "internal ID",
|
||||||
|
MarkdownDescription: "internal ID",
|
||||||
|
}
|
||||||
|
resp.Schema = sch
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read refreshes the Terraform state with the latest data.
|
// Read refreshes the Terraform state with the latest data.
|
||||||
|
|
@ -90,22 +97,22 @@ func (r *instanceDataSource) Read(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId := model.ProjectId.ValueString()
|
projectID := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceID := model.InstanceId.ValueString()
|
||||||
region := r.providerData.GetRegionWithOverride(model.Region)
|
region := r.providerData.GetRegionWithOverride(model.Region)
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectID, region, instanceID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
utils.LogError(
|
utils.LogError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
err,
|
err,
|
||||||
"Reading instance",
|
"Reading instance",
|
||||||
fmt.Sprintf("Instance with ID %q does not exist in project %q.", instanceId, projectId),
|
fmt.Sprintf("Instance with ID %q does not exist in project %q.", instanceID, projectID),
|
||||||
map[int]string{
|
map[int]string{
|
||||||
http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectId),
|
http.StatusForbidden: fmt.Sprintf("Project with ID %q not found or forbidden access", projectID),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
resp.State.RemoveResource(ctx)
|
resp.State.RemoveResource(ctx)
|
||||||
|
|
|
||||||
|
|
@ -115,6 +115,12 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "Whether the instance can be deleted or not.",
|
Description: "Whether the instance can be deleted or not.",
|
||||||
MarkdownDescription: "Whether the instance can be deleted or not.",
|
MarkdownDescription: "Whether the instance can be deleted or not.",
|
||||||
},
|
},
|
||||||
|
"labels": schema.MapAttribute{
|
||||||
|
ElementType: types.StringType,
|
||||||
|
Computed: true,
|
||||||
|
Description: "Key-value pairs, 63 characters max, begin and end with an alphanumerical character,\nmay contain dashes (-), underscores (_), dots (.), and alphanumerics between. Key MUST be at least 1 character.\nMax 64 labels\nRegex for keys: ^(?=.{1,63}$)([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]$\nRegex for values: ^(?=.{0,63}$)(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])*$\nThe stackit- prefix is reserved and cannot be used for Keys.\n",
|
||||||
|
MarkdownDescription: "Key-value pairs, 63 characters max, begin and end with an alphanumerical character,\nmay contain dashes (-), underscores (_), dots (.), and alphanumerics between. Key MUST be at least 1 character.\nMax 64 labels\nRegex for keys: ^(?=.{1,63}$)([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]$\nRegex for values: ^(?=.{0,63}$)(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])*$\nThe stackit- prefix is reserved and cannot be used for Keys.\n",
|
||||||
|
},
|
||||||
"name": schema.StringAttribute{
|
"name": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The name of the instance.",
|
Description: "The name of the instance.",
|
||||||
|
|
@ -155,7 +161,7 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The STACKIT project ID.",
|
MarkdownDescription: "The STACKIT project ID.",
|
||||||
},
|
},
|
||||||
"region": schema.StringAttribute{
|
"region": schema.StringAttribute{
|
||||||
Required: true,
|
Optional: true,
|
||||||
Description: "The region which should be addressed",
|
Description: "The region which should be addressed",
|
||||||
MarkdownDescription: "The region which should be addressed",
|
MarkdownDescription: "The region which should be addressed",
|
||||||
Validators: []validator.String{
|
Validators: []validator.String{
|
||||||
|
|
@ -171,8 +177,8 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"retention_days": schema.Int64Attribute{
|
"retention_days": schema.Int64Attribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
|
Description: "How long backups are retained. The value can only be between 32 and 90 days.",
|
||||||
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
|
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 90 days.",
|
||||||
},
|
},
|
||||||
"status": schema.StringAttribute{
|
"status": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
|
|
@ -219,6 +225,7 @@ type InstanceModel struct {
|
||||||
Id types.String `tfsdk:"tf_original_api_id"`
|
Id types.String `tfsdk:"tf_original_api_id"`
|
||||||
InstanceId types.String `tfsdk:"instance_id"`
|
InstanceId types.String `tfsdk:"instance_id"`
|
||||||
IsDeletable types.Bool `tfsdk:"is_deletable"`
|
IsDeletable types.Bool `tfsdk:"is_deletable"`
|
||||||
|
Labels types.Map `tfsdk:"labels"`
|
||||||
Name types.String `tfsdk:"name"`
|
Name types.String `tfsdk:"name"`
|
||||||
Network NetworkValue `tfsdk:"network"`
|
Network NetworkValue `tfsdk:"network"`
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,65 @@
|
||||||
|
package postgresflexalpha
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
)
|
||||||
|
|
||||||
|
type flavorsClientReader interface {
|
||||||
|
GetFlavorsRequest(
|
||||||
|
ctx context.Context,
|
||||||
|
projectId, region string,
|
||||||
|
) v3alpha1api.ApiGetFlavorsRequestRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
func getAllFlavors(ctx context.Context, client flavorsClientReader, projectId, region string) (
|
||||||
|
[]v3alpha1api.ListFlavors,
|
||||||
|
error,
|
||||||
|
) {
|
||||||
|
getAllFilter := func(_ v3alpha1api.ListFlavors) bool { return true }
|
||||||
|
flavorList, err := getFlavorsByFilter(ctx, client, projectId, region, getAllFilter)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return flavorList, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getFlavorsByFilter is a helper function to retrieve flavors using a filtern function.
|
||||||
|
// Hint: The API does not have a GetFlavors endpoint, only ListFlavors
|
||||||
|
func getFlavorsByFilter(
|
||||||
|
ctx context.Context,
|
||||||
|
client flavorsClientReader,
|
||||||
|
projectId, region string,
|
||||||
|
filter func(db v3alpha1api.ListFlavors) bool,
|
||||||
|
) ([]v3alpha1api.ListFlavors, error) {
|
||||||
|
if projectId == "" || region == "" {
|
||||||
|
return nil, fmt.Errorf("listing postgresflex flavors: projectId and region are required")
|
||||||
|
}
|
||||||
|
|
||||||
|
const pageSize = 25
|
||||||
|
|
||||||
|
var result = make([]v3alpha1api.ListFlavors, 0)
|
||||||
|
|
||||||
|
for page := int32(1); ; page++ {
|
||||||
|
res, err := client.GetFlavorsRequest(ctx, projectId, region).
|
||||||
|
Page(page).Size(pageSize).Sort(v3alpha1api.FLAVORSORT_ID_ASC).Execute()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("requesting flavors list (page %d): %w", page, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the API returns no flavors, we have reached the end of the list.
|
||||||
|
if len(res.Flavors) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, flavor := range res.Flavors {
|
||||||
|
if filter(flavor) {
|
||||||
|
result = append(result, flavor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,135 @@
|
||||||
|
package postgresflexalpha
|
||||||
|
|
||||||
|
/*
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
)
|
||||||
|
|
||||||
|
type mockRequest struct {
|
||||||
|
executeFunc func() (*postgresflex.GetFlavorsResponse, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockRequest) Page(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
|
||||||
|
func (m *mockRequest) Size(_ int32) postgresflex.ApiGetFlavorsRequestRequest { return m }
|
||||||
|
func (m *mockRequest) Sort(_ postgresflex.FlavorSort) postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
func (m *mockRequest) Execute() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
return m.executeFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
type mockFlavorsClient struct {
|
||||||
|
executeRequest func() postgresflex.ApiGetFlavorsRequestRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockFlavorsClient) GetFlavorsRequest(_ context.Context, _, _ string) postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return m.executeRequest()
|
||||||
|
}
|
||||||
|
|
||||||
|
var mockResp = func(page int32) (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
if page == 1 {
|
||||||
|
return &postgresflex.GetFlavorsResponse{
|
||||||
|
Flavors: []postgresflex.ListFlavors{
|
||||||
|
{Id: "flavor-1", Description: "first"},
|
||||||
|
{Id: "flavor-2", Description: "second"},
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
if page == 2 {
|
||||||
|
return &postgresflex.GetFlavorsResponse{
|
||||||
|
Flavors: []postgresflex.ListFlavors{
|
||||||
|
{Id: "flavor-3", Description: "three"},
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &postgresflex.GetFlavorsResponse{
|
||||||
|
Flavors: []postgresflex.ListFlavors{},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetFlavorsByFilter(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
description string
|
||||||
|
projectId string
|
||||||
|
region string
|
||||||
|
mockErr error
|
||||||
|
filter func(postgresflex.ListFlavors) bool
|
||||||
|
wantCount int
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
description: "Success - Get all flavors (2 pages)",
|
||||||
|
projectId: "pid", region: "reg",
|
||||||
|
filter: func(_ postgresflex.ListFlavors) bool { return true },
|
||||||
|
wantCount: 3,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "Success - Filter flavors by description",
|
||||||
|
projectId: "pid", region: "reg",
|
||||||
|
filter: func(f postgresflex.ListFlavors) bool { return f.Description == "first" },
|
||||||
|
wantCount: 1,
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: "Error - Missing parameters",
|
||||||
|
projectId: "", region: "reg",
|
||||||
|
wantErr: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(
|
||||||
|
tt.description, func(t *testing.T) {
|
||||||
|
var currentPage int32
|
||||||
|
client := &mockFlavorsClient{
|
||||||
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return mockRequest{
|
||||||
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
currentPage++
|
||||||
|
return mockResp(currentPage)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
actual, err := getFlavorsByFilter(context.Background(), client, tt.projectId, tt.region, tt.filter)
|
||||||
|
|
||||||
|
if (err != nil) != tt.wantErr {
|
||||||
|
t.Errorf("getFlavorsByFilter() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !tt.wantErr && len(actual) != tt.wantCount {
|
||||||
|
t.Errorf("getFlavorsByFilter() got %d flavors, want %d", len(actual), tt.wantCount)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetAllFlavors(t *testing.T) {
|
||||||
|
var currentPage int32
|
||||||
|
client := &mockFlavorsClient{
|
||||||
|
executeRequest: func() postgresflex.ApiGetFlavorsRequestRequest {
|
||||||
|
return mockRequest{
|
||||||
|
executeFunc: func() (*postgresflex.GetFlavorsResponse, error) {
|
||||||
|
currentPage++
|
||||||
|
return mockResp(currentPage)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := getAllFlavors(context.Background(), client, "pid", "reg")
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("getAllFlavors() unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if len(res) != 3 {
|
||||||
|
t.Errorf("getAllFlavors() expected 3 flavor, got %d", len(res))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
@ -16,7 +16,7 @@ import (
|
||||||
|
|
||||||
func mapGetInstanceResponseToModel(
|
func mapGetInstanceResponseToModel(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
m *postgresflexalpharesource.InstanceModel,
|
m *LocalInstanceModel,
|
||||||
resp *postgresflex.GetInstanceResponse,
|
resp *postgresflex.GetInstanceResponse,
|
||||||
) error {
|
) error {
|
||||||
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
|
m.BackupSchedule = types.StringValue(resp.GetBackupSchedule())
|
||||||
|
|
@ -55,23 +55,23 @@ func mapGetInstanceResponseToModel(
|
||||||
}
|
}
|
||||||
|
|
||||||
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
||||||
if m.Id.IsNull() || m.Id.IsUnknown() {
|
m.Id = utils.BuildInternalTerraformId(
|
||||||
m.Id = utils.BuildInternalTerraformId(
|
m.ProjectId.ValueString(),
|
||||||
m.ProjectId.ValueString(),
|
m.Region.ValueString(),
|
||||||
m.Region.ValueString(),
|
resp.Id,
|
||||||
m.InstanceId.ValueString(),
|
)
|
||||||
)
|
|
||||||
}
|
|
||||||
m.InstanceId = types.StringValue(resp.Id)
|
m.InstanceId = types.StringValue(resp.Id)
|
||||||
|
|
||||||
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
||||||
|
|
||||||
netAcl, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
|
netACL, diags := types.ListValueFrom(ctx, types.StringType, resp.Network.GetAcl())
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
return fmt.Errorf("failed converting network acl from response")
|
return fmt.Errorf("failed converting network acl from response")
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Acl = netAcl
|
m.Acl = netACL
|
||||||
|
|
||||||
|
// m.Labels = resp.GetLabels()
|
||||||
|
|
||||||
netInstAdd := types.StringValue("")
|
netInstAdd := types.StringValue("")
|
||||||
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
|
if instAdd, ok := resp.Network.GetInstanceAddressOk(); ok {
|
||||||
|
|
@ -87,7 +87,7 @@ func mapGetInstanceResponseToModel(
|
||||||
postgresflexalpharesource.NetworkValue{}.AttributeTypes(ctx),
|
postgresflexalpharesource.NetworkValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"access_scope": basetypes.NewStringValue(string(resp.Network.GetAccessScope())),
|
"access_scope": basetypes.NewStringValue(string(resp.Network.GetAccessScope())),
|
||||||
"acl": netAcl,
|
"acl": netACL,
|
||||||
"instance_address": netInstAdd,
|
"instance_address": netInstAdd,
|
||||||
"router_address": netRtrAdd,
|
"router_address": netRtrAdd,
|
||||||
},
|
},
|
||||||
|
|
@ -130,7 +130,8 @@ func mapGetDataInstanceResponseToModel(
|
||||||
handleConnectionInfo(ctx, m, resp)
|
handleConnectionInfo(ctx, m, resp)
|
||||||
|
|
||||||
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
||||||
m.Id = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
|
m.Id = types.StringValue(resp.Id)
|
||||||
|
m.TerraformID = utils.BuildInternalTerraformId(m.ProjectId.ValueString(), m.Region.ValueString(), m.InstanceId.ValueString())
|
||||||
m.InstanceId = types.StringValue(resp.Id)
|
m.InstanceId = types.StringValue(resp.Id)
|
||||||
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
m.IsDeletable = types.BoolValue(resp.GetIsDeletable())
|
||||||
m.Name = types.StringValue(resp.GetName())
|
m.Name = types.StringValue(resp.GetName())
|
||||||
|
|
@ -212,14 +213,14 @@ func handleNetwork(ctx context.Context, m *dataSourceModel, resp *postgresflex.G
|
||||||
}
|
}
|
||||||
|
|
||||||
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
||||||
keyId := ""
|
keyID := ""
|
||||||
if keyIdVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
|
if keyIDVal, ok := resp.Encryption.GetKekKeyIdOk(); ok {
|
||||||
keyId = *keyIdVal
|
keyID = *keyIDVal
|
||||||
}
|
}
|
||||||
|
|
||||||
keyRingId := ""
|
keyRingID := ""
|
||||||
if keyRingIdVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
|
if keyRingIDVal, ok := resp.Encryption.GetKekKeyRingIdOk(); ok {
|
||||||
keyRingId = *keyRingIdVal
|
keyRingID = *keyRingIDVal
|
||||||
}
|
}
|
||||||
|
|
||||||
keyVersion := ""
|
keyVersion := ""
|
||||||
|
|
@ -233,8 +234,8 @@ func handleEncryption(m *dataSourceModel, resp *postgresflex.GetInstanceResponse
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Encryption = postgresflexalphadatasource.EncryptionValue{
|
m.Encryption = postgresflexalphadatasource.EncryptionValue{
|
||||||
KekKeyId: types.StringValue(keyId),
|
KekKeyId: types.StringValue(keyID),
|
||||||
KekKeyRingId: types.StringValue(keyRingId),
|
KekKeyRingId: types.StringValue(keyRingID),
|
||||||
KekKeyVersion: types.StringValue(keyVersion),
|
KekKeyVersion: types.StringValue(keyVersion),
|
||||||
ServiceAccount: types.StringValue(svcAcc),
|
ServiceAccount: types.StringValue(svcAcc),
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,6 @@ import (
|
||||||
|
|
||||||
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
postgresflex "github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
postgresflexalpharesource "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
|
||||||
utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
utils2 "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -171,7 +170,7 @@ func Test_mapGetInstanceResponseToModel(t *testing.T) {
|
||||||
t.Skipf("please implement")
|
t.Skipf("please implement")
|
||||||
type args struct {
|
type args struct {
|
||||||
ctx context.Context
|
ctx context.Context
|
||||||
m *postgresflexalpharesource.InstanceModel
|
m *LocalInstanceModel
|
||||||
resp *postgresflex.GetInstanceResponse
|
resp *postgresflex.GetInstanceResponse
|
||||||
}
|
}
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
|
|
|
||||||
|
|
@ -7,16 +7,18 @@ import (
|
||||||
"math"
|
"math"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-framework/path"
|
"github.com/hashicorp/terraform-plugin-framework/path"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource/identityschema"
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
||||||
"github.com/hashicorp/terraform-plugin-log/tflog"
|
"github.com/hashicorp/terraform-plugin-log/tflog"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
"github.com/stackitcloud/stackit-sdk-go/core/oapierror"
|
||||||
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
|
coreUtils "github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
"github.com/stackitcloud/stackit-sdk-go/services/postgresflex/v3alpha1api"
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/core"
|
||||||
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
postgresflexalpha "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/instance/resources_gen"
|
||||||
|
|
@ -32,7 +34,6 @@ var (
|
||||||
_ resource.ResourceWithImportState = &instanceResource{}
|
_ resource.ResourceWithImportState = &instanceResource{}
|
||||||
_ resource.ResourceWithModifyPlan = &instanceResource{}
|
_ resource.ResourceWithModifyPlan = &instanceResource{}
|
||||||
_ resource.ResourceWithValidateConfig = &instanceResource{}
|
_ resource.ResourceWithValidateConfig = &instanceResource{}
|
||||||
_ resource.ResourceWithIdentity = &instanceResource{}
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// NewInstanceResource is a helper function to simplify the provider implementation.
|
// NewInstanceResource is a helper function to simplify the provider implementation.
|
||||||
|
|
@ -40,27 +41,31 @@ func NewInstanceResource() resource.Resource {
|
||||||
return &instanceResource{}
|
return &instanceResource{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// resourceModel describes the resource data model.
|
|
||||||
type resourceModel = postgresflexalpha.InstanceModel
|
|
||||||
|
|
||||||
type InstanceResourceIdentityModel struct {
|
|
||||||
ProjectID types.String `tfsdk:"project_id"`
|
|
||||||
Region types.String `tfsdk:"region"`
|
|
||||||
InstanceID types.String `tfsdk:"instance_id"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// instanceResource is the resource implementation.
|
// instanceResource is the resource implementation.
|
||||||
type instanceResource struct {
|
type instanceResource struct {
|
||||||
client *v3alpha1api.APIClient
|
client *v3alpha1api.APIClient
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type LocalInstanceModel struct {
|
||||||
|
postgresflexalpha.InstanceModel
|
||||||
|
Flavor types.Object `tfsdk:"flavor"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Struct corresponding to Model.Flavor
|
||||||
|
type flavorModel struct {
|
||||||
|
Id types.String `tfsdk:"id"`
|
||||||
|
Description types.String `tfsdk:"description"`
|
||||||
|
CPU types.Int64 `tfsdk:"cpu"`
|
||||||
|
RAM types.Int64 `tfsdk:"ram"`
|
||||||
|
}
|
||||||
|
|
||||||
func (r *instanceResource) ValidateConfig(
|
func (r *instanceResource) ValidateConfig(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
req resource.ValidateConfigRequest,
|
req resource.ValidateConfigRequest,
|
||||||
resp *resource.ValidateConfigResponse,
|
resp *resource.ValidateConfigResponse,
|
||||||
) {
|
) {
|
||||||
var data resourceModel
|
var data LocalInstanceModel
|
||||||
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
|
resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
|
||||||
|
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -75,6 +80,23 @@ func (r *instanceResource) ValidateConfig(
|
||||||
"The resource may return unexpected results.",
|
"The resource may return unexpected results.",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if data.FlavorId.IsNull() {
|
||||||
|
if data.Flavor.IsUnknown() || data.Flavor.IsNull() {
|
||||||
|
resp.Diagnostics.AddAttributeError(
|
||||||
|
path.Root("flavor"),
|
||||||
|
"Missing Attribute Configuration",
|
||||||
|
"Expected flavor to be configured. "+
|
||||||
|
"The resource may return unexpected results.",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
resp.Diagnostics.AddAttributeWarning(
|
||||||
|
path.Root("flavor"),
|
||||||
|
"Attribute Configuration Deprecation",
|
||||||
|
"Using flavor is deprecated, "+
|
||||||
|
"please use flavor_id instead.",
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ModifyPlan implements resource.ResourceWithModifyPlan.
|
// ModifyPlan implements resource.ResourceWithModifyPlan.
|
||||||
|
|
@ -84,7 +106,7 @@ func (r *instanceResource) ModifyPlan(
|
||||||
req resource.ModifyPlanRequest,
|
req resource.ModifyPlanRequest,
|
||||||
resp *resource.ModifyPlanResponse,
|
resp *resource.ModifyPlanResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
var configModel resourceModel
|
var configModel LocalInstanceModel
|
||||||
// skip initial empty configuration to avoid follow-up errors
|
// skip initial empty configuration to avoid follow-up errors
|
||||||
if req.Config.Raw.IsNull() {
|
if req.Config.Raw.IsNull() {
|
||||||
return
|
return
|
||||||
|
|
@ -94,7 +116,7 @@ func (r *instanceResource) ModifyPlan(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
var planModel resourceModel
|
var planModel LocalInstanceModel
|
||||||
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
|
resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
return
|
return
|
||||||
|
|
@ -144,40 +166,53 @@ func (r *instanceResource) Configure(
|
||||||
var modifiersFileByte []byte
|
var modifiersFileByte []byte
|
||||||
|
|
||||||
// Schema defines the schema for the resource.
|
// Schema defines the schema for the resource.
|
||||||
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
schema := postgresflexalpha.InstanceResourceSchema(ctx)
|
schemaVar := postgresflexalpha.InstanceResourceSchema(ctx)
|
||||||
|
schemaVar.Attributes["flavor"] = schema.SingleNestedAttribute{
|
||||||
|
Optional: true,
|
||||||
|
DeprecationMessage: "Please use flavor_id instead.",
|
||||||
|
Attributes: map[string]schema.Attribute{
|
||||||
|
"id": schema.StringAttribute{
|
||||||
|
Computed: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
UseStateForUnknownIfFlavorUnchanged(req),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"description": schema.StringAttribute{
|
||||||
|
Computed: true,
|
||||||
|
PlanModifiers: []planmodifier.String{
|
||||||
|
UseStateForUnknownIfFlavorUnchanged(req),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"cpu": schema.Int64Attribute{
|
||||||
|
DeprecationMessage: "Please use flavor_id instead.",
|
||||||
|
Optional: true,
|
||||||
|
},
|
||||||
|
"ram": schema.Int64Attribute{
|
||||||
|
DeprecationMessage: "Please use flavor_id instead.",
|
||||||
|
Optional: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
schemaVar.Attributes["flavor_id"] = schema.StringAttribute{
|
||||||
|
Optional: true,
|
||||||
|
Description: "The id of the instance flavor.",
|
||||||
|
MarkdownDescription: "The id of the instance flavor.",
|
||||||
|
}
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
|
resp.Diagnostics.AddError("error during read modifiers config file", err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
err = utils.AddPlanModifiersToResourceSchema(fields, &schema)
|
err = utils.AddPlanModifiersToResourceSchema(fields, &schemaVar)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
|
resp.Diagnostics.AddError("error adding plan modifiers", err.Error())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
resp.Schema = schema
|
resp.Schema = schemaVar
|
||||||
}
|
|
||||||
|
|
||||||
func (r *instanceResource) IdentitySchema(
|
|
||||||
_ context.Context,
|
|
||||||
_ resource.IdentitySchemaRequest,
|
|
||||||
resp *resource.IdentitySchemaResponse,
|
|
||||||
) {
|
|
||||||
resp.IdentitySchema = identityschema.Schema{
|
|
||||||
Attributes: map[string]identityschema.Attribute{
|
|
||||||
"project_id": identityschema.StringAttribute{
|
|
||||||
RequiredForImport: true, // must be set during import by the practitioner
|
|
||||||
},
|
|
||||||
"region": identityschema.StringAttribute{
|
|
||||||
RequiredForImport: true, // can be defaulted by the provider configuration
|
|
||||||
},
|
|
||||||
"instance_id": identityschema.StringAttribute{
|
|
||||||
RequiredForImport: true, // can be defaulted by the provider configuration
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create creates the resource and sets the initial Terraform state.
|
// Create creates the resource and sets the initial Terraform state.
|
||||||
|
|
@ -186,7 +221,7 @@ func (r *instanceResource) Create(
|
||||||
req resource.CreateRequest,
|
req resource.CreateRequest,
|
||||||
resp *resource.CreateResponse,
|
resp *resource.CreateResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
var model resourceModel
|
var model LocalInstanceModel
|
||||||
|
|
||||||
diags := req.Plan.Get(ctx, &model)
|
diags := req.Plan.Get(ctx, &model)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
|
|
@ -201,15 +236,82 @@ func (r *instanceResource) Create(
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectID)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
var netAcl []string
|
var netACL []string
|
||||||
diag := model.Network.Acl.ElementsAs(ctx, &netAcl, false)
|
diag := model.Network.Acl.ElementsAs(ctx, &netACL, false)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
if diag.HasError() {
|
if diag.HasError() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// determine flavor ID
|
||||||
|
var flModel = &flavorModel{}
|
||||||
|
if !(model.Flavor.IsNull() || model.Flavor.IsUnknown()) {
|
||||||
|
diags = model.Flavor.As(ctx, flModel, basetypes.ObjectAsOptions{})
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
flavors, err := getAllFlavors(ctx, r.client.DefaultAPI, projectID, region)
|
||||||
|
if err != nil {
|
||||||
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error reading flavors", fmt.Sprintf("getAllFlavors: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
tflog.Debug(ctx, fmt.Sprintf("loaded flavors: %d", len(flavors)))
|
||||||
|
|
||||||
|
var foundFlavors []v3alpha1api.ListFlavors
|
||||||
|
for _, flavor := range flavors {
|
||||||
|
if flModel.CPU.ValueInt64() != int64(flavor.Cpu) {
|
||||||
|
// tflog.Debug(ctx, fmt.Sprintf("flavor - cpu did not match (%d - %d)", flModel.CPU.ValueInt64(), flavor.Cpu))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if flModel.RAM.ValueInt64() != int64(flavor.Memory) {
|
||||||
|
// tflog.Debug(ctx, fmt.Sprintf("flavor - ram did not match (%d - %d)", flModel.RAM.ValueInt64(), flavor.Memory))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
tmpNodeType := "Single"
|
||||||
|
if model.Replicas.ValueInt64() > 1 {
|
||||||
|
tmpNodeType = "Replica"
|
||||||
|
}
|
||||||
|
if strings.ToLower(tmpNodeType) != strings.ToLower(flavor.NodeType) {
|
||||||
|
//tflog.Debug(
|
||||||
|
// ctx,
|
||||||
|
// fmt.Sprintf(
|
||||||
|
// "flavor - nodeType did not match ('%s' - '%s')",
|
||||||
|
// strings.ToLower(tmpNodeType),
|
||||||
|
// strings.ToLower(flavor.NodeType),
|
||||||
|
// ),
|
||||||
|
//)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
tflog.Debug(ctx, fmt.Sprintf("found flavor %s, checking storage classes", flavor.Id))
|
||||||
|
for _, sc := range flavor.StorageClasses {
|
||||||
|
if model.Storage.PerformanceClass.ValueString() != sc.Class {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
tflog.Debug(ctx, fmt.Sprintf("found storage class '%s' for flavor '%s', checking storage classes", sc.Class, flavor.Id))
|
||||||
|
foundFlavors = append(foundFlavors, flavor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(foundFlavors) == 0 {
|
||||||
|
resp.Diagnostics.AddError("get flavor", "could not find requested flavor")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(foundFlavors) > 1 {
|
||||||
|
resp.Diagnostics.AddError("get flavor", "found too many matching flavors")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
f := foundFlavors[0]
|
||||||
|
flModel.Description = types.StringValue(f.Description)
|
||||||
|
flModel.Id = utils.BuildInternalTerraformId(model.ProjectId.ValueString(), region, f.Id)
|
||||||
|
model.FlavorId = types.StringValue(f.Id)
|
||||||
|
//flModel. .MaxGb = types.Int32Value(f.MaxGB)
|
||||||
|
//flModel.MinGb = types.Int32Value(f.MinGB)
|
||||||
|
}
|
||||||
|
|
||||||
replVal := model.Replicas.ValueInt64() // nolint:gosec // check is performed above
|
replVal := model.Replicas.ValueInt64() // nolint:gosec // check is performed above
|
||||||
payload := modelToCreateInstancePayload(netAcl, model, replVal)
|
payload := modelToCreateInstancePayload(netACL, model, replVal)
|
||||||
|
|
||||||
// Create new instance
|
// Create new instance
|
||||||
createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
|
createResp, err := r.client.DefaultAPI.CreateInstanceRequest(
|
||||||
|
|
@ -229,18 +331,18 @@ func (r *instanceResource) Create(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set data returned by API in identity
|
// Set data returned by API in id
|
||||||
identity := InstanceResourceIdentityModel{
|
resp.Diagnostics.Append(
|
||||||
ProjectID: types.StringValue(projectID),
|
resp.State.SetAttribute(
|
||||||
Region: types.StringValue(region),
|
ctx,
|
||||||
InstanceID: types.StringPointerValue(instanceID),
|
path.Root("id"),
|
||||||
}
|
utils.BuildInternalTerraformId(projectID, region, *instanceID),
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
)...,
|
||||||
if resp.Diagnostics.HasError() {
|
)
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectID, region, *instanceID).
|
waitResp, err := wait.CreateInstanceWaitHandler(ctx, r.client.DefaultAPI, projectID, region, *instanceID).
|
||||||
|
SetTimeout(90 * time.Minute).
|
||||||
|
SetSleepBeforeWait(10 * time.Second).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
|
|
@ -274,7 +376,7 @@ func (r *instanceResource) Create(
|
||||||
|
|
||||||
func modelToCreateInstancePayload(
|
func modelToCreateInstancePayload(
|
||||||
netACL []string,
|
netACL []string,
|
||||||
model postgresflexalpha.InstanceModel,
|
model LocalInstanceModel,
|
||||||
replVal int64,
|
replVal int64,
|
||||||
) v3alpha1api.CreateInstanceRequestPayload {
|
) v3alpha1api.CreateInstanceRequestPayload {
|
||||||
var enc *v3alpha1api.InstanceEncryption
|
var enc *v3alpha1api.InstanceEncryption
|
||||||
|
|
@ -314,7 +416,7 @@ func (r *instanceResource) Read(
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
functionErrorSummary := "read instance failed"
|
functionErrorSummary := "read instance failed"
|
||||||
|
|
||||||
var model resourceModel
|
var model LocalInstanceModel
|
||||||
diags := req.State.Get(ctx, &model)
|
diags := req.State.Get(ctx, &model)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
if resp.Diagnostics.HasError() {
|
if resp.Diagnostics.HasError() {
|
||||||
|
|
@ -323,9 +425,9 @@ func (r *instanceResource) Read(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
var projectId string
|
var projectID string
|
||||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||||
projectId = model.ProjectId.ValueString()
|
projectID = model.ProjectId.ValueString()
|
||||||
}
|
}
|
||||||
|
|
||||||
var region string
|
var region string
|
||||||
|
|
@ -333,16 +435,16 @@ func (r *instanceResource) Read(
|
||||||
region = r.providerData.GetRegionWithOverride(model.Region)
|
region = r.providerData.GetRegionWithOverride(model.Region)
|
||||||
}
|
}
|
||||||
|
|
||||||
var instanceId string
|
var instanceID string
|
||||||
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
||||||
instanceId = model.InstanceId.ValueString()
|
instanceID = model.InstanceId.ValueString()
|
||||||
}
|
}
|
||||||
|
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
instanceResp, err := r.client.DefaultAPI.GetInstanceRequest(ctx, projectID, region, instanceID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
||||||
if ok && oapiErr.StatusCode == http.StatusNotFound {
|
if ok && oapiErr.StatusCode == http.StatusNotFound {
|
||||||
|
|
@ -361,7 +463,7 @@ func (r *instanceResource) Read(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
|
if !model.InstanceId.IsUnknown() && !model.InstanceId.IsNull() {
|
||||||
if *respInstanceID != instanceId {
|
if *respInstanceID != instanceID {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
|
|
@ -372,6 +474,10 @@ func (r *instanceResource) Read(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if model.Id.IsUnknown() || model.Id.IsNull() {
|
||||||
|
model.Id = utils.BuildInternalTerraformId(projectID, region, instanceID)
|
||||||
|
}
|
||||||
|
|
||||||
err = mapGetInstanceResponseToModel(ctx, &model, instanceResp)
|
err = mapGetInstanceResponseToModel(ctx, &model, instanceResp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
|
|
@ -389,17 +495,6 @@ func (r *instanceResource) Read(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set data returned by API in identity
|
|
||||||
identity := InstanceResourceIdentityModel{
|
|
||||||
ProjectID: types.StringValue(projectId),
|
|
||||||
Region: types.StringValue(region),
|
|
||||||
InstanceID: types.StringValue(instanceId),
|
|
||||||
}
|
|
||||||
resp.Diagnostics.Append(resp.Identity.Set(ctx, identity)...)
|
|
||||||
if resp.Diagnostics.HasError() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
tflog.Info(ctx, "Postgres Flex instance read")
|
tflog.Info(ctx, "Postgres Flex instance read")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -409,7 +504,7 @@ func (r *instanceResource) Update(
|
||||||
req resource.UpdateRequest,
|
req resource.UpdateRequest,
|
||||||
resp *resource.UpdateResponse,
|
resp *resource.UpdateResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
var model resourceModel
|
var model LocalInstanceModel
|
||||||
|
|
||||||
diags := req.Plan.Get(ctx, &model)
|
diags := req.Plan.Get(ctx, &model)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
|
|
@ -419,15 +514,8 @@ func (r *instanceResource) Update(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
// Read identity data
|
projectID := model.ProjectId.ValueString()
|
||||||
var identityData InstanceResourceIdentityModel
|
instanceID := model.InstanceId.ValueString()
|
||||||
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
|
|
||||||
if resp.Diagnostics.HasError() {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
projectID := identityData.ProjectID.ValueString()
|
|
||||||
instanceID := identityData.InstanceID.ValueString()
|
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectID)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
|
|
@ -490,7 +578,10 @@ func (r *instanceResource) Update(
|
||||||
projectID,
|
projectID,
|
||||||
region,
|
region,
|
||||||
instanceID,
|
instanceID,
|
||||||
).WaitWithContext(ctx)
|
).
|
||||||
|
SetTimeout(90 * time.Minute).
|
||||||
|
SetSleepBeforeWait(10 * time.Second).
|
||||||
|
WaitWithContext(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx,
|
ctx,
|
||||||
|
|
@ -526,7 +617,7 @@ func (r *instanceResource) Delete(
|
||||||
req resource.DeleteRequest,
|
req resource.DeleteRequest,
|
||||||
resp *resource.DeleteResponse,
|
resp *resource.DeleteResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
var model resourceModel
|
var model LocalInstanceModel
|
||||||
|
|
||||||
diags := req.State.Get(ctx, &model)
|
diags := req.State.Get(ctx, &model)
|
||||||
resp.Diagnostics.Append(diags...)
|
resp.Diagnostics.Append(diags...)
|
||||||
|
|
@ -536,15 +627,15 @@ func (r *instanceResource) Delete(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
projectId := model.ProjectId.ValueString()
|
projectID := model.ProjectId.ValueString()
|
||||||
instanceId := model.InstanceId.ValueString()
|
instanceID := model.InstanceId.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
ctx = tflog.SetField(ctx, "project_id", projectId)
|
ctx = tflog.SetField(ctx, "project_id", projectID)
|
||||||
ctx = tflog.SetField(ctx, "instance_id", instanceId)
|
ctx = tflog.SetField(ctx, "instance_id", instanceID)
|
||||||
ctx = tflog.SetField(ctx, "region", region)
|
ctx = tflog.SetField(ctx, "region", region)
|
||||||
|
|
||||||
// Delete existing instance
|
// Delete existing instance
|
||||||
err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
err := r.client.DefaultAPI.DeleteInstanceRequest(ctx, projectID, region, instanceID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error deleting instance", fmt.Sprintf("Calling API: %v", err))
|
||||||
return
|
return
|
||||||
|
|
@ -552,7 +643,7 @@ func (r *instanceResource) Delete(
|
||||||
|
|
||||||
ctx = core.LogResponse(ctx)
|
ctx = core.LogResponse(ctx)
|
||||||
|
|
||||||
_, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectId, region, instanceId).Execute()
|
_, err = r.client.DefaultAPI.GetInstanceRequest(ctx, projectID, region, instanceID).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
oapiErr, ok := err.(*oapierror.GenericOpenAPIError) //nolint:errorlint //complaining that error.As should be used to catch wrapped errors, but this error should not be wrapped
|
||||||
if ok && oapiErr.StatusCode != http.StatusNotFound {
|
if ok && oapiErr.StatusCode != http.StatusNotFound {
|
||||||
|
|
@ -574,41 +665,30 @@ func (r *instanceResource) ImportState(
|
||||||
) {
|
) {
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
if req.ID != "" {
|
idParts := strings.Split(req.ID, core.Separator)
|
||||||
idParts := strings.Split(req.ID, core.Separator)
|
|
||||||
|
|
||||||
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
|
if len(idParts) != 3 || idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(
|
||||||
ctx, &resp.Diagnostics,
|
ctx, &resp.Diagnostics,
|
||||||
"Error importing instance",
|
"Error importing instance",
|
||||||
fmt.Sprintf(
|
fmt.Sprintf(
|
||||||
"Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
|
"Expected import identifier with format [project_id],[region],[instance_id] Got: %q",
|
||||||
req.ID,
|
req.ID,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// If no ID is provided, attempt to read identity attributes from the import configuration
|
resp.Diagnostics.Append(
|
||||||
var identityData InstanceResourceIdentityModel
|
resp.State.SetAttribute(
|
||||||
resp.Diagnostics.Append(req.Identity.Get(ctx, &identityData)...)
|
ctx,
|
||||||
if resp.Diagnostics.HasError() {
|
path.Root("id"),
|
||||||
return
|
utils.BuildInternalTerraformId(idParts...),
|
||||||
}
|
)...,
|
||||||
|
)
|
||||||
projectId := identityData.ProjectID.ValueString()
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), idParts[0])...)
|
||||||
region := identityData.Region.ValueString()
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), idParts[1])...)
|
||||||
instanceId := identityData.InstanceID.ValueString()
|
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), idParts[2])...)
|
||||||
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project_id"), projectId)...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("region"), region)...)
|
|
||||||
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance_id"), instanceId)...)
|
|
||||||
|
|
||||||
tflog.Info(ctx, "Postgres Flex instance state imported")
|
tflog.Info(ctx, "Postgres Flex instance state imported")
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -119,6 +119,13 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
||||||
Description: "Whether the instance can be deleted or not.",
|
Description: "Whether the instance can be deleted or not.",
|
||||||
MarkdownDescription: "Whether the instance can be deleted or not.",
|
MarkdownDescription: "Whether the instance can be deleted or not.",
|
||||||
},
|
},
|
||||||
|
//"labels": schema.MapAttribute{
|
||||||
|
// ElementType: types.StringType,
|
||||||
|
// Optional: true,
|
||||||
|
// Computed: true,
|
||||||
|
// Description: "Key-value pairs, 63 characters max, begin and end with an alphanumerical character,\nmay contain dashes (-), underscores (_), dots (.), and alphanumerics between. Key MUST be at least 1 character.\nMax 64 labels\nRegex for keys: ^(?=.{1,63}$)([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]$\nRegex for values: ^(?=.{0,63}$)(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])*$\nThe stackit- prefix is reserved and cannot be used for Keys.\n",
|
||||||
|
// MarkdownDescription: "Key-value pairs, 63 characters max, begin and end with an alphanumerical character,\nmay contain dashes (-), underscores (_), dots (.), and alphanumerics between. Key MUST be at least 1 character.\nMax 64 labels\nRegex for keys: ^(?=.{1,63}$)([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]$\nRegex for values: ^(?=.{0,63}$)(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])*$\nThe stackit- prefix is reserved and cannot be used for Keys.\n",
|
||||||
|
//},
|
||||||
"name": schema.StringAttribute{
|
"name": schema.StringAttribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "The name of the instance.",
|
Description: "The name of the instance.",
|
||||||
|
|
@ -191,8 +198,8 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
||||||
},
|
},
|
||||||
"retention_days": schema.Int64Attribute{
|
"retention_days": schema.Int64Attribute{
|
||||||
Required: true,
|
Required: true,
|
||||||
Description: "How long backups are retained. The value can only be between 32 and 365 days.",
|
Description: "How long backups are retained. The value can only be between 32 and 90 days.",
|
||||||
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 365 days.",
|
MarkdownDescription: "How long backups are retained. The value can only be between 32 and 90 days.",
|
||||||
},
|
},
|
||||||
"status": schema.StringAttribute{
|
"status": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
|
|
@ -239,15 +246,16 @@ type InstanceModel struct {
|
||||||
Id types.String `tfsdk:"id"`
|
Id types.String `tfsdk:"id"`
|
||||||
InstanceId types.String `tfsdk:"instance_id"`
|
InstanceId types.String `tfsdk:"instance_id"`
|
||||||
IsDeletable types.Bool `tfsdk:"is_deletable"`
|
IsDeletable types.Bool `tfsdk:"is_deletable"`
|
||||||
Name types.String `tfsdk:"name"`
|
//Labels types.Map `tfsdk:"labels"`
|
||||||
Network NetworkValue `tfsdk:"network"`
|
Name types.String `tfsdk:"name"`
|
||||||
ProjectId types.String `tfsdk:"project_id"`
|
Network NetworkValue `tfsdk:"network"`
|
||||||
Region types.String `tfsdk:"region"`
|
ProjectId types.String `tfsdk:"project_id"`
|
||||||
Replicas types.Int64 `tfsdk:"replicas"`
|
Region types.String `tfsdk:"region"`
|
||||||
RetentionDays types.Int64 `tfsdk:"retention_days"`
|
Replicas types.Int64 `tfsdk:"replicas"`
|
||||||
Status types.String `tfsdk:"status"`
|
RetentionDays types.Int64 `tfsdk:"retention_days"`
|
||||||
Storage StorageValue `tfsdk:"storage"`
|
Status types.String `tfsdk:"status"`
|
||||||
Version types.String `tfsdk:"version"`
|
Storage StorageValue `tfsdk:"storage"`
|
||||||
|
Version types.String `tfsdk:"version"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ basetypes.ObjectTypable = ConnectionInfoType{}
|
var _ basetypes.ObjectTypable = ConnectionInfoType{}
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,85 @@
|
||||||
|
package postgresflexalpha
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
|
||||||
|
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
type useStateForUnknownIfFlavorUnchangedModifier struct {
|
||||||
|
Req resource.SchemaRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
// UseStateForUnknownIfFlavorUnchanged returns a plan modifier similar to UseStateForUnknown
|
||||||
|
// if the RAM and CPU values are not changed in the plan. Otherwise, the plan modifier does nothing.
|
||||||
|
func UseStateForUnknownIfFlavorUnchanged(req resource.SchemaRequest) planmodifier.String {
|
||||||
|
return useStateForUnknownIfFlavorUnchangedModifier{
|
||||||
|
Req: req,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m useStateForUnknownIfFlavorUnchangedModifier) Description(context.Context) string {
|
||||||
|
return "UseStateForUnknownIfFlavorUnchanged returns a plan modifier similar to UseStateForUnknown if the RAM and CPU values are not changed in the plan. Otherwise, the plan modifier does nothing."
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m useStateForUnknownIfFlavorUnchangedModifier) MarkdownDescription(ctx context.Context) string {
|
||||||
|
return m.Description(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m useStateForUnknownIfFlavorUnchangedModifier) PlanModifyString(ctx context.Context, req planmodifier.StringRequest, resp *planmodifier.StringResponse) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
// Do nothing if there is no state value.
|
||||||
|
if req.StateValue.IsNull() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Do nothing if there is a known planned value.
|
||||||
|
if !req.PlanValue.IsUnknown() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Do nothing if there is an unknown configuration value, otherwise interpolation gets messed up.
|
||||||
|
if req.ConfigValue.IsUnknown() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// The above checks are taken from the UseStateForUnknown plan modifier implementation
|
||||||
|
// (https://github.com/hashicorp/terraform-plugin-framework/blob/main/resource/schema/stringplanmodifier/use_state_for_unknown.go#L38)
|
||||||
|
|
||||||
|
var stateModel LocalInstanceModel
|
||||||
|
diags := req.State.Get(ctx, &stateModel)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var stateFlavor = &flavorModel{}
|
||||||
|
if !(stateModel.Flavor.IsNull() || stateModel.Flavor.IsUnknown()) {
|
||||||
|
diags = stateModel.Flavor.As(ctx, stateFlavor, basetypes.ObjectAsOptions{})
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var planModel LocalInstanceModel
|
||||||
|
diags = req.Plan.Get(ctx, &planModel)
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var planFlavor = &flavorModel{}
|
||||||
|
if !(planModel.Flavor.IsNull() || planModel.Flavor.IsUnknown()) {
|
||||||
|
diags = planModel.Flavor.As(ctx, planFlavor, basetypes.ObjectAsOptions{})
|
||||||
|
resp.Diagnostics.Append(diags...)
|
||||||
|
if resp.Diagnostics.HasError() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if planFlavor.CPU == stateFlavor.CPU && planFlavor.RAM == stateFlavor.RAM {
|
||||||
|
resp.PlanValue = req.StateValue
|
||||||
|
}
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -23,16 +23,21 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
}
|
}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
network = {
|
network = {
|
||||||
acl = ["{{ .ACLString }}"]
|
acl = [{{ range $i, $v := .ACLStrings }}{{if $i}},{{end}}"{{$v}}"{{end}}]
|
||||||
access_scope = "{{ .AccessScope }}"
|
access_scope = "{{ .AccessScope }}"
|
||||||
}
|
}
|
||||||
version = {{ .Version }}
|
{{ if .Version }}
|
||||||
|
version = "{{ .Version }}"
|
||||||
|
{{ end }}
|
||||||
}
|
}
|
||||||
|
|
||||||
{{ if .Users }}
|
{{ if .Users }}
|
||||||
{{ $tfName := .TfName }}
|
{{ $tfName := .TfName }}
|
||||||
{{ range $user := .Users }}
|
{{ range $user := .Users }}
|
||||||
resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
|
depends_on = [
|
||||||
|
stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}
|
||||||
|
]
|
||||||
project_id = "{{ $user.ProjectID }}"
|
project_id = "{{ $user.ProjectID }}"
|
||||||
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
name = "{{ $user.Name }}"
|
name = "{{ $user.Name }}"
|
||||||
|
|
@ -45,6 +50,10 @@ resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
{{ $tfName := .TfName }}
|
{{ $tfName := .TfName }}
|
||||||
{{ range $db := .Databases }}
|
{{ range $db := .Databases }}
|
||||||
resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
||||||
|
depends_on = [
|
||||||
|
stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }},
|
||||||
|
stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}
|
||||||
|
]
|
||||||
project_id = "{{ $db.ProjectID }}"
|
project_id = "{{ $db.ProjectID }}"
|
||||||
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
name = "{{ $db.Name }}"
|
name = "{{ $db.Name }}"
|
||||||
|
|
@ -52,3 +61,32 @@ resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
||||||
}
|
}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
{{ end }}
|
{{ end }}
|
||||||
|
|
||||||
|
{{ if .DataSourceTest }}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
|
project_id = stackitprivatepreview_postgresflexalpha_instance.{{ .TfName }}.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ .TfName }}.instance_id
|
||||||
|
}
|
||||||
|
|
||||||
|
{{ if .Users }}
|
||||||
|
{{ $tfName := .TfName }}
|
||||||
|
{{ range $user := .Users }}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
|
project_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
|
user_id = stackitprivatepreview_postgresflexalpha_user.{{ $user.Name }}.user_id
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
|
||||||
|
{{ if .Databases }}
|
||||||
|
{{ $tfName := .TfName }}
|
||||||
|
{{ range $db := .Databases }}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
||||||
|
project_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
|
database_id = stackitprivatepreview_postgresflexalpha_database.{{ $db.Name }}.database_id
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
|
|
||||||
101
stackit/internal/services/postgresflexalpha/testdata/instance_template_with_flavor.gompl
vendored
Normal file
101
stackit/internal/services/postgresflexalpha/testdata/instance_template_with_flavor.gompl
vendored
Normal file
|
|
@ -0,0 +1,101 @@
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "{{ .Region }}"
|
||||||
|
service_account_key_path = "{{ .ServiceAccountFilePath }}"
|
||||||
|
}
|
||||||
|
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
|
||||||
|
project_id = "{{ .ProjectID }}"
|
||||||
|
region = "{{ .Region }}"
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
|
project_id = "{{ .ProjectID }}"
|
||||||
|
name = "{{ .Name }}"
|
||||||
|
backup_schedule = "{{ .BackupSchedule }}"
|
||||||
|
retention_days = {{ .RetentionDays }}
|
||||||
|
flavor_id = "{{ .FlavorID }}"
|
||||||
|
replicas = {{ .Replicas }}
|
||||||
|
storage = {
|
||||||
|
performance_class = "{{ .PerformanceClass }}"
|
||||||
|
size = {{ .Size }}
|
||||||
|
}
|
||||||
|
{{ if .UseEncryption }}
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "{{ .KekKeyID }}"
|
||||||
|
kek_key_ring_id = "{{ .KekKeyRingID }}"
|
||||||
|
kek_key_version = {{ .KekKeyVersion }}
|
||||||
|
service_account = "{{ .KekServiceAccount }}"
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
network = {
|
||||||
|
acl = [{{ range $i, $v := .ACLStrings }}{{if $i}},{{end}}"{{$v}}"{{end}}]
|
||||||
|
access_scope = "{{ .AccessScope }}"
|
||||||
|
}
|
||||||
|
{{ if .Version }}
|
||||||
|
version = "{{ .Version }}"
|
||||||
|
{{ end }}
|
||||||
|
}
|
||||||
|
|
||||||
|
{{ if .Users }}
|
||||||
|
{{ $tfName := .TfName }}
|
||||||
|
{{ range $user := .Users }}
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
|
depends_on = [
|
||||||
|
stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}
|
||||||
|
]
|
||||||
|
project_id = "{{ $user.ProjectID }}"
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
|
name = "{{ $user.Name }}"
|
||||||
|
roles = [{{ range $i, $v := $user.Roles }}{{if $i}},{{end}}"{{$v}}"{{end}}]
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
|
||||||
|
{{ if .Databases }}
|
||||||
|
{{ $tfName := .TfName }}
|
||||||
|
{{ range $db := .Databases }}
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
||||||
|
depends_on = [
|
||||||
|
stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }},
|
||||||
|
stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}
|
||||||
|
]
|
||||||
|
project_id = "{{ $db.ProjectID }}"
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
|
name = "{{ $db.Name }}"
|
||||||
|
owner = stackitprivatepreview_postgresflexalpha_user.{{ $db.Owner }}.name
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
|
||||||
|
{{ if .DataSourceTest }}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_instance" "{{ .TfName }}" {
|
||||||
|
project_id = stackitprivatepreview_postgresflexalpha_instance.{{ .TfName }}.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ .TfName }}.instance_id
|
||||||
|
}
|
||||||
|
|
||||||
|
{{ if .Users }}
|
||||||
|
{{ $tfName := .TfName }}
|
||||||
|
{{ range $user := .Users }}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_user" "{{ $user.Name }}" {
|
||||||
|
project_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
|
user_id = stackitprivatepreview_postgresflexalpha_user.{{ $user.Name }}.user_id
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
|
||||||
|
{{ if .Databases }}
|
||||||
|
{{ $tfName := .TfName }}
|
||||||
|
{{ range $db := .Databases }}
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_database" "{{ $db.Name }}" {
|
||||||
|
project_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.project_id
|
||||||
|
instance_id = stackitprivatepreview_postgresflexalpha_instance.{{ $tfName }}.instance_id
|
||||||
|
database_id = stackitprivatepreview_postgresflexalpha_database.{{ $db.Name }}.database_id
|
||||||
|
}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
{{ end }}
|
||||||
|
|
@ -35,7 +35,7 @@ func UserDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The STACKIT project ID.",
|
MarkdownDescription: "The STACKIT project ID.",
|
||||||
},
|
},
|
||||||
"region": schema.StringAttribute{
|
"region": schema.StringAttribute{
|
||||||
Required: true,
|
Optional: true,
|
||||||
Description: "The region which should be addressed",
|
Description: "The region which should be addressed",
|
||||||
MarkdownDescription: "The region which should be addressed",
|
MarkdownDescription: "The region which should be addressed",
|
||||||
Validators: []validator.String{
|
Validators: []validator.String{
|
||||||
|
|
|
||||||
|
|
@ -116,7 +116,12 @@ func mapResourceFields(userResp *v3alpha1api.GetUserResponse, model *resourceMod
|
||||||
return fmt.Errorf("user id not present")
|
return fmt.Errorf("user id not present")
|
||||||
}
|
}
|
||||||
|
|
||||||
model.Id = types.Int64Value(userID)
|
model.Id = utils.BuildInternalTerraformId(
|
||||||
|
model.ProjectId.ValueString(),
|
||||||
|
model.Region.ValueString(),
|
||||||
|
model.InstanceId.ValueString(),
|
||||||
|
strconv.FormatInt(userID, 10),
|
||||||
|
)
|
||||||
model.UserId = types.Int64Value(userID)
|
model.UserId = types.Int64Value(userID)
|
||||||
model.Name = types.StringValue(user.Name)
|
model.Name = types.StringValue(user.Name)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
package postgresflexalpha
|
package postgresflexalpha
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/google/go-cmp/cmp"
|
"github.com/google/go-cmp/cmp"
|
||||||
|
|
@ -165,7 +166,7 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringValue(fmt.Sprintf("%s,%s,%s,%d", "pid", testRegion, "iid", 1)),
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
|
|
@ -187,7 +188,7 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringValue(fmt.Sprintf("%s,%s,%s,%d", "pid", testRegion, "iid", 1)),
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
|
|
@ -209,7 +210,7 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringValue(fmt.Sprintf("%s,%s,%s,%d", "pid", testRegion, "iid", 1)),
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
|
|
@ -249,6 +250,7 @@ func TestMapFieldsCreate(t *testing.T) {
|
||||||
tt.description, func(t *testing.T) {
|
tt.description, func(t *testing.T) {
|
||||||
state := &resourceModel{
|
state := &resourceModel{
|
||||||
ProjectId: tt.expected.ProjectId,
|
ProjectId: tt.expected.ProjectId,
|
||||||
|
Region: types.StringValue(testRegion),
|
||||||
InstanceId: tt.expected.InstanceId,
|
InstanceId: tt.expected.InstanceId,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -286,7 +288,7 @@ func TestMapFields(t *testing.T) {
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringValue(fmt.Sprintf("%s,%s,%s,%d", "pid", testRegion, "iid", 1)),
|
||||||
UserId: types.Int64Value(int64(1)),
|
UserId: types.Int64Value(int64(1)),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
|
|
@ -311,7 +313,7 @@ func TestMapFields(t *testing.T) {
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringValue(fmt.Sprintf("%s,%s,%s,%d", "pid", testRegion, "iid", 1)),
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
|
|
@ -339,7 +341,7 @@ func TestMapFields(t *testing.T) {
|
||||||
},
|
},
|
||||||
testRegion,
|
testRegion,
|
||||||
resourceModel{
|
resourceModel{
|
||||||
Id: types.Int64Value(1),
|
Id: types.StringValue(fmt.Sprintf("%s,%s,%s,%d", "pid", testRegion, "iid", 1)),
|
||||||
UserId: types.Int64Value(1),
|
UserId: types.Int64Value(1),
|
||||||
InstanceId: types.StringValue("iid"),
|
InstanceId: types.StringValue("iid"),
|
||||||
ProjectId: types.StringValue("pid"),
|
ProjectId: types.StringValue("pid"),
|
||||||
|
|
@ -379,6 +381,7 @@ func TestMapFields(t *testing.T) {
|
||||||
state := &resourceModel{
|
state := &resourceModel{
|
||||||
ProjectId: tt.expected.ProjectId,
|
ProjectId: tt.expected.ProjectId,
|
||||||
InstanceId: tt.expected.InstanceId,
|
InstanceId: tt.expected.InstanceId,
|
||||||
|
Region: types.StringValue(tt.region),
|
||||||
}
|
}
|
||||||
err := mapResourceFields(tt.input, state, tt.region)
|
err := mapResourceFields(tt.input, state, tt.region)
|
||||||
if !tt.isValid && err == nil {
|
if !tt.isValid && err == nil {
|
||||||
|
|
@ -388,7 +391,7 @@ func TestMapFields(t *testing.T) {
|
||||||
t.Fatalf("Should not have failed: %v", err)
|
t.Fatalf("Should not have failed: %v", err)
|
||||||
}
|
}
|
||||||
if tt.isValid {
|
if tt.isValid {
|
||||||
diff := cmp.Diff(state, &tt.expected)
|
diff := cmp.Diff(&tt.expected, state)
|
||||||
if diff != "" {
|
if diff != "" {
|
||||||
t.Fatalf("Data does not match: %s", diff)
|
t.Fatalf("Data does not match: %s", diff)
|
||||||
}
|
}
|
||||||
|
|
@ -476,7 +479,7 @@ func TestToCreatePayload(t *testing.T) {
|
||||||
t.Fatalf("Should not have failed: %v", err)
|
t.Fatalf("Should not have failed: %v", err)
|
||||||
}
|
}
|
||||||
if tt.isValid {
|
if tt.isValid {
|
||||||
diff := cmp.Diff(output, tt.expected)
|
diff := cmp.Diff(tt.expected, output)
|
||||||
if diff != "" {
|
if diff != "" {
|
||||||
t.Fatalf("Data does not match: %s", diff)
|
t.Fatalf("Data does not match: %s", diff)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue