Compare commits
11 commits
v0.0.19-al
...
alpha
| Author | SHA1 | Date | |
|---|---|---|---|
| 3790894563 | |||
| f173fd54fe | |||
| 1033d7e034 | |||
|
|
635a9abf20 | ||
|
|
07458c5677 | ||
|
|
eb13630d2f | ||
| 4a2819787d | |||
|
|
36eccc52c3 | ||
|
|
841e702b95 | ||
|
|
aba831cbdd | ||
|
|
89a24ce780 |
157 changed files with 8720 additions and 5858 deletions
220
.github/actions/acc_test/action.yaml
vendored
220
.github/actions/acc_test/action.yaml
vendored
|
|
@ -2,6 +2,11 @@ name: Acceptance Testing
|
||||||
description: "Acceptance Testing pipeline"
|
description: "Acceptance Testing pipeline"
|
||||||
|
|
||||||
inputs:
|
inputs:
|
||||||
|
test_timeout_string:
|
||||||
|
description: "string that determines the timeout (default: 45m)"
|
||||||
|
default: '45m'
|
||||||
|
required: true
|
||||||
|
|
||||||
go-version:
|
go-version:
|
||||||
description: "go version to install"
|
description: "go version to install"
|
||||||
default: '1.25'
|
default: '1.25'
|
||||||
|
|
@ -11,38 +16,78 @@ inputs:
|
||||||
description: "STACKIT project ID for tests"
|
description: "STACKIT project ID for tests"
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
project_user_email:
|
||||||
|
required: true
|
||||||
|
description: "project user email for acc testing"
|
||||||
|
|
||||||
|
tf_acc_kek_key_id:
|
||||||
|
description: "KEK key ID"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_acc_kek_key_ring_id:
|
||||||
|
description: "KEK key ring ID"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_acc_kek_key_version:
|
||||||
|
description: "KEK key version"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
tf_acc_kek_service_account:
|
||||||
|
description: "KEK service account email"
|
||||||
|
required: true
|
||||||
|
|
||||||
region:
|
region:
|
||||||
description: "STACKIT region for tests"
|
description: "STACKIT region for tests"
|
||||||
default: 'eu01'
|
default: 'eu01'
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
service_account_json:
|
service_account_json_content:
|
||||||
description: "STACKIT service account JSON file contents"
|
description: "STACKIT service account JSON file contents"
|
||||||
required: true
|
required: true
|
||||||
|
default: ""
|
||||||
|
|
||||||
|
service_account_json_content_b64:
|
||||||
|
description: "STACKIT service account JSON file contents"
|
||||||
|
required: true
|
||||||
|
default: ""
|
||||||
|
|
||||||
|
service_account_json_file_path:
|
||||||
|
description: "STACKIT service account JSON file contents"
|
||||||
|
required: true
|
||||||
|
default: 'service_account.json'
|
||||||
|
|
||||||
test_file:
|
test_file:
|
||||||
description: "testfile to run"
|
description: "testfile to run"
|
||||||
default: ''
|
default: ''
|
||||||
|
|
||||||
outputs:
|
|
||||||
random-number:
|
#outputs:
|
||||||
description: "Random number"
|
# random-number:
|
||||||
value: ${{ steps.random-number-generator.outputs.random-number }}
|
# description: "Random number"
|
||||||
|
# value: ${{ steps.random-number-generator.outputs.random-number }}
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
- name: Random Number Generator
|
# - name: Random Number Generator
|
||||||
id: random-number-generator
|
# id: random-number-generator
|
||||||
run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
|
# run: echo "random-number=$(echo $RANDOM)" >> $GITHUB_OUTPUT
|
||||||
shell: bash
|
# shell: bash
|
||||||
|
|
||||||
- name: Install needed tools
|
- name: Install needed tools
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::apt install"
|
||||||
set -e
|
set -e
|
||||||
apt-get -y -qq update
|
apt-get -y -qq update >apt_update.log 2>apt_update_err.log
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_update.log apt_update_err.log
|
||||||
|
fi
|
||||||
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget >apt_get.log 2>apt_get_err.log
|
||||||
|
if [ $? -ne 0 ]; then
|
||||||
|
cat apt_get.log apt_get_err.log
|
||||||
|
fi
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Setup JAVA
|
- name: Setup JAVA
|
||||||
uses: actions/setup-java@v5
|
uses: actions/setup-java@v5
|
||||||
|
|
@ -53,62 +98,165 @@ runs:
|
||||||
- name: Install Go ${{ inputs.go-version }}
|
- name: Install Go ${{ inputs.go-version }}
|
||||||
uses: actions/setup-go@v6
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ inputs.go-version }}
|
# go-version: ${{ inputs.go-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
go-version-file: 'go.mod'
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Determine GOMODCACHE
|
||||||
|
shell: bash
|
||||||
|
id: goenv
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Restore cached GO pkg
|
||||||
|
id: cache-gopkg
|
||||||
|
uses: actions/cache/restore@v5
|
||||||
|
with:
|
||||||
|
path: "${{ steps.goenv.outputs.gomodcache }}"
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
- name: Install go tools
|
- name: Install go tools
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::go install"
|
||||||
set -e
|
set -e
|
||||||
go mod download
|
go mod download
|
||||||
go install golang.org/x/tools/cmd/goimports@latest
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
|
||||||
|
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
|
||||||
|
echo "::endgroup::"
|
||||||
|
- name: Run go mod tidy
|
||||||
|
shell: bash
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
- name: Prepare pkg_gen directory
|
- name: Save GO package Cache
|
||||||
|
id: cache-gopkg-save
|
||||||
|
uses: actions/cache/save@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
- name: Creating service_account file from json input
|
||||||
|
if: inputs.service_account_json_content != ''
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
go run cmd/main.go build -p
|
echo "::group::create service account file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
jsonFile="${{ inputs.service_account_json_file_path }}"
|
||||||
|
jsonFile="${jsonFile:-x}"
|
||||||
|
if [ "${jsonFile}" == "x" ]; then
|
||||||
|
echo "no service account file path provided"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ ! -f "${jsonFile}" ]; then
|
||||||
|
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
|
||||||
|
echo "${{ inputs.service_account_json_content }}" > stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
fi
|
||||||
|
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
- name: Creating service_account file from base64 json input
|
||||||
|
if: inputs.service_account_json_content_b64 != ''
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "::group::create service account file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
|
jsonFile="${{ inputs.service_account_json_file_path }}"
|
||||||
|
jsonFile="${jsonFile:-x}"
|
||||||
|
if [ "${jsonFile}" == "x" ]; then
|
||||||
|
echo "no service account file path provided"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ ! -f "${jsonFile}" ]; then
|
||||||
|
echo "creating service account file '${{ inputs.service_account_json_file_path }}'"
|
||||||
|
echo "${{ inputs.service_account_json_content_b64 }}" | base64 -d > stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
fi
|
||||||
|
ls -l stackit/"${{ inputs.service_account_json_file_path }}"
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
- name: Run acceptance test file
|
- name: Run acceptance test file
|
||||||
if: ${{ inputs.test_file != '' }}
|
if: ${{ inputs.test_file != '' }}
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::go test file"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
echo "Running acceptance tests for the terraform provider"
|
echo "Running acceptance tests for the terraform provider"
|
||||||
echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
|
cd stackit || exit 1
|
||||||
cd stackit
|
|
||||||
TF_ACC=1 \
|
TF_ACC=1 \
|
||||||
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
||||||
TF_ACC_REGION=${TF_ACC_REGION} \
|
TF_ACC_REGION=${TF_ACC_REGION} \
|
||||||
go test ${{ inputs.test_file }} -count=1 -timeout=30m
|
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
|
||||||
|
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
|
||||||
|
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
||||||
|
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
||||||
|
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
||||||
|
go test ${{ inputs.test_file }} -count=1 -timeout=${{ inputs.test_timeout_string }}
|
||||||
|
echo "::endgroup::"
|
||||||
env:
|
env:
|
||||||
STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
|
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
TF_PROJECT_ID: ${{ inputs.project_id }}
|
|
||||||
TF_ACC_REGION: ${{ inputs.region }}
|
TF_ACC_REGION: ${{ inputs.region }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
|
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
|
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
|
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
|
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
|
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
|
||||||
|
# - name: Run test action
|
||||||
|
# if: ${{ inputs.test_file == '' }}
|
||||||
|
# env:
|
||||||
|
# TF_ACC: 1
|
||||||
|
# TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
|
# TF_ACC_REGION: ${{ inputs.region }}
|
||||||
|
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
||||||
|
# TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
||||||
|
# TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
|
# TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
|
# TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
# TF_ACC_SERVICE_ACCOUNT_FILE: "${PWD}/${{ inputs.service_account_json_file_path }}"
|
||||||
|
# uses: robherley/go-test-action@v0.1.0
|
||||||
|
# with:
|
||||||
|
# testArguments: "./... -timeout 45m"
|
||||||
|
|
||||||
- name: Run acceptance tests
|
- name: Run acceptance tests
|
||||||
if: ${{ inputs.test_file == '' }}
|
if: ${{ inputs.test_file == '' }}
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
echo "::group::go test all"
|
||||||
|
set -e
|
||||||
|
set -o pipefail
|
||||||
|
|
||||||
echo "Running acceptance tests for the terraform provider"
|
echo "Running acceptance tests for the terraform provider"
|
||||||
echo "${STACKIT_SERVICE_ACCOUNT_JSON}" > ~/.service_account.json
|
cd stackit || exit 1
|
||||||
cd stackit
|
|
||||||
TF_ACC=1 \
|
TF_ACC=1 \
|
||||||
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
TF_ACC_PROJECT_ID=${TF_ACC_PROJECT_ID} \
|
||||||
TF_ACC_REGION=${TF_ACC_REGION} \
|
TF_ACC_REGION=${TF_ACC_REGION} \
|
||||||
go test ./... -count=1 -timeout=30m
|
TF_ACC_TEST_PROJECT_USER_EMAIL=${TF_ACC_TEST_PROJECT_USER_EMAIL} \
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE="${PWD}/${{ inputs.service_account_json_file_path }}" \
|
||||||
|
TF_ACC_KEK_KEY_ID=${TF_ACC_KEK_KEY_ID} \
|
||||||
|
TF_ACC_KEK_KEY_RING_ID=${TF_ACC_KEK_KEY_RING_ID} \
|
||||||
|
TF_ACC_KEK_KEY_VERSION=${TF_ACC_KEK_KEY_VERSION} \
|
||||||
|
TF_ACC_KEK_SERVICE_ACCOUNT=${TF_ACC_KEK_SERVICE_ACCOUNT} \
|
||||||
|
go test ./... -count=1 -timeout=${{ inputs.test_timeout_string }}
|
||||||
|
echo "::endgroup::"
|
||||||
env:
|
env:
|
||||||
STACKIT_SERVICE_ACCOUNT_JSON: ${{ inputs.service_account_json }}
|
TF_ACC_PROJECT_ID: ${{ inputs.project_id }}
|
||||||
TF_PROJECT_ID: ${{ inputs.project_id }}
|
|
||||||
TF_ACC_REGION: ${{ inputs.region }}
|
TF_ACC_REGION: ${{ inputs.region }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
|
TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ inputs.project_user_email }}
|
||||||
# TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN: ${{ secrets.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_TOKEN }}
|
TF_ACC_KEK_KEY_ID: ${{ inputs.tf_acc_kek_key_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_CONTAINER_ID }}
|
TF_ACC_KEK_KEY_RING_ID: ${{ inputs.tf_acc_kek_key_ring_id }}
|
||||||
# TF_ACC_TEST_PROJECT_PARENT_UUID: ${{ secrets.TF_ACC_TEST_PROJECT_PARENT_UUID }}
|
TF_ACC_KEK_KEY_VERSION: ${{ inputs.tf_acc_kek_key_version }}
|
||||||
# TF_ACC_TEST_PROJECT_USER_EMAIL: ${{ secrets.TF_ACC_TEST_PROJECT_USER_EMAIL }}
|
TF_ACC_KEK_SERVICE_ACCOUNT: ${{ inputs.tf_acc_kek_service_account }}
|
||||||
|
|
|
||||||
54
.github/actions/build/action.yaml
vendored
54
.github/actions/build/action.yaml
vendored
|
|
@ -20,25 +20,63 @@ runs:
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
apt-get -y -qq update
|
apt-get -y -qq update
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Install Go ${{ inputs.go-version }}
|
- name: Install Go ${{ inputs.go-version }}
|
||||||
uses: actions/setup-go@v6
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ inputs.go-version }}
|
# go-version: ${{ inputs.go-version }}
|
||||||
check-latest: true
|
check-latest: true
|
||||||
go-version-file: 'go.mod'
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Determine GOMODCACHE
|
||||||
|
shell: bash
|
||||||
|
id: goenv
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
|
||||||
|
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Restore cached GO pkg
|
||||||
|
id: cache-gopkg
|
||||||
|
uses: actions/cache/restore@v5
|
||||||
|
with:
|
||||||
|
path: "${{ steps.goenv.outputs.gomodcache }}"
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
- name: Install go tools
|
- name: Install go tools
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
go install golang.org/x/tools/cmd/goimports@latest
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
|
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@latest
|
||||||
|
|
||||||
|
# - name: Run build pkg directory
|
||||||
|
# shell: bash
|
||||||
|
# run: |
|
||||||
|
# set -e
|
||||||
|
# go run generator/main.go build
|
||||||
|
|
||||||
|
- name: Get all go packages
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
go get ./...
|
||||||
|
|
||||||
|
- name: Save Cache
|
||||||
|
id: cache-gopkg-save
|
||||||
|
uses: actions/cache/save@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
|
- name: Setup JAVA ${{ inputs.java-distribution }} ${{ inputs.go-version }}
|
||||||
uses: actions/setup-java@v5
|
uses: actions/setup-java@v5
|
||||||
|
|
@ -46,16 +84,6 @@ runs:
|
||||||
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
|
distribution: ${{ inputs.java-distribution }} # See 'Supported distributions' for available options
|
||||||
java-version: ${{ inputs.java-version }}
|
java-version: ${{ inputs.java-version }}
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run build pkg directory
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
set -e
|
|
||||||
go run cmd/main.go build
|
|
||||||
|
|
||||||
|
|
||||||
- name: Run make to build app
|
- name: Run make to build app
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|
|
||||||
4
.github/actions/setup-cache-go/action.yaml
vendored
4
.github/actions/setup-cache-go/action.yaml
vendored
|
|
@ -26,9 +26,9 @@ runs:
|
||||||
uses: https://code.forgejo.org/actions/setup-go@v6
|
uses: https://code.forgejo.org/actions/setup-go@v6
|
||||||
id: go-version
|
id: go-version
|
||||||
with:
|
with:
|
||||||
go-version: ${{ inputs.go-version }}
|
# go-version: ${{ inputs.go-version }}
|
||||||
check-latest: true # Always check for the latest patch release
|
check-latest: true # Always check for the latest patch release
|
||||||
# go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
# do not cache dependencies, we do this manually
|
# do not cache dependencies, we do this manually
|
||||||
cache: false
|
cache: false
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -22,6 +22,39 @@ env:
|
||||||
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
|
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
runner_test:
|
||||||
|
name: "Test STACKIT runner"
|
||||||
|
runs-on: stackit-docker
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
run: |
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
||||||
|
|
||||||
|
- name: Setup Go
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
|
||||||
|
- name: Install go tools
|
||||||
|
run: |
|
||||||
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
|
||||||
|
- name: Setup JAVA
|
||||||
|
uses: actions/setup-java@v5
|
||||||
|
with:
|
||||||
|
distribution: 'temurin' # See 'Supported distributions' for available options
|
||||||
|
java-version: '21'
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Run build pkg directory
|
||||||
|
run: |
|
||||||
|
go run cmd/main.go build
|
||||||
|
|
||||||
publish_test:
|
publish_test:
|
||||||
name: "Test readiness for publishing provider"
|
name: "Test readiness for publishing provider"
|
||||||
needs: config
|
needs: config
|
||||||
|
|
@ -201,29 +234,29 @@ jobs:
|
||||||
run: make lint
|
run: make lint
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
|
|
||||||
# - name: Testing
|
# - name: Testing
|
||||||
# run: make test
|
# run: make test
|
||||||
#
|
#
|
||||||
# - name: Acceptance Testing
|
# - name: Acceptance Testing
|
||||||
# if: ${{ github.event_name == 'pull_request' }}
|
# if: ${{ github.event_name == 'pull_request' }}
|
||||||
# run: make test-acceptance-tf
|
# run: make test-acceptance-tf
|
||||||
#
|
#
|
||||||
# - name: Check coverage threshold
|
# - name: Check coverage threshold
|
||||||
# shell: bash
|
# shell: bash
|
||||||
# run: |
|
# run: |
|
||||||
# make coverage
|
# make coverage
|
||||||
# COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
|
# COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
|
||||||
# echo "Coverage: $COVERAGE%"
|
# echo "Coverage: $COVERAGE%"
|
||||||
# if (( $(echo "$COVERAGE < 80" | bc -l) )); then
|
# if (( $(echo "$COVERAGE < 80" | bc -l) )); then
|
||||||
# echo "Coverage is below 80%"
|
# echo "Coverage is below 80%"
|
||||||
# # exit 1
|
# # exit 1
|
||||||
# fi
|
# fi
|
||||||
|
|
||||||
# - name: Archive code coverage results
|
# - name: Archive code coverage results
|
||||||
# uses: actions/upload-artifact@v4
|
# uses: actions/upload-artifact@v4
|
||||||
# with:
|
# with:
|
||||||
# name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
# name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
||||||
# path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
|
# path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
|
||||||
|
|
||||||
config:
|
config:
|
||||||
if: ${{ github.event_name != 'schedule' }}
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
343
.github/workflows/ci_new.yaml
vendored
Normal file
343
.github/workflows/ci_new.yaml
vendored
Normal file
|
|
@ -0,0 +1,343 @@
|
||||||
|
name: CI Workflow
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- alpha
|
||||||
|
- main
|
||||||
|
workflow_dispatch:
|
||||||
|
schedule:
|
||||||
|
# every sunday at 00:00
|
||||||
|
# - cron: '0 0 * * 0'
|
||||||
|
# every day at 00:00
|
||||||
|
- cron: '0 0 * * *'
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- '!main'
|
||||||
|
- '!alpha'
|
||||||
|
paths:
|
||||||
|
- '!.github'
|
||||||
|
|
||||||
|
env:
|
||||||
|
GO_VERSION: "1.25"
|
||||||
|
CODE_COVERAGE_FILE_NAME: "coverage.out" # must be the same as in Makefile
|
||||||
|
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
config:
|
||||||
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
|
name: Check GoReleaser config
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Check GoReleaser
|
||||||
|
uses: goreleaser/goreleaser-action@v7
|
||||||
|
with:
|
||||||
|
args: check
|
||||||
|
|
||||||
|
prepare:
|
||||||
|
name: Prepare GO cache
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read # Required to identify workflow run.
|
||||||
|
checks: write # Required to add status summary.
|
||||||
|
contents: read # Required to checkout repository.
|
||||||
|
pull-requests: write # Required to add PR comment.
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Install Go ${{ inputs.go-version }}
|
||||||
|
id: go-install
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
# go-version: ${{ inputs.go-version }}
|
||||||
|
check-latest: true
|
||||||
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Determine GOMODCACHE
|
||||||
|
shell: bash
|
||||||
|
id: goenv
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
# echo "::set-output name=gomodcache::$(go env GOMODCACHE)"
|
||||||
|
echo "gomodcache=$(go env GOMODCACHE)" >> "$GITHUB_OUTPUT"
|
||||||
|
|
||||||
|
- name: Restore cached GO pkg
|
||||||
|
id: cache-gopkg
|
||||||
|
uses: actions/cache/restore@v5
|
||||||
|
with:
|
||||||
|
path: "${{ steps.goenv.outputs.gomodcache }}"
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
- name: Install go tools
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
|
||||||
|
- name: Get all go packages
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
go get ./...
|
||||||
|
|
||||||
|
- name: Save Cache
|
||||||
|
if: steps.cache-gopkg.outputs.cache-hit != 'true'
|
||||||
|
id: cache-gopkg-save
|
||||||
|
uses: actions/cache/save@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.goenv.outputs.gomodcache }}
|
||||||
|
key: ${{ runner.os }}-gopkg
|
||||||
|
|
||||||
|
|
||||||
|
publish_test:
|
||||||
|
name: "Test readiness for publishing provider"
|
||||||
|
needs:
|
||||||
|
- config
|
||||||
|
- prepare
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read # Required to identify workflow run.
|
||||||
|
checks: write # Required to add status summary.
|
||||||
|
contents: read # Required to checkout repository.
|
||||||
|
pull-requests: write # Required to add PR comment.
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
run: |
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget unzip bc
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Setup Go
|
||||||
|
uses: actions/setup-go@v6
|
||||||
|
with:
|
||||||
|
# go-version: ${{ env.GO_VERSION }}
|
||||||
|
check-latest: true
|
||||||
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
|
- name: Install go tools
|
||||||
|
run: |
|
||||||
|
go install golang.org/x/tools/cmd/goimports@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
||||||
|
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
||||||
|
|
||||||
|
- name: Setup JAVA
|
||||||
|
uses: actions/setup-java@v5
|
||||||
|
with:
|
||||||
|
distribution: 'temurin' # See 'Supported distributions' for available options
|
||||||
|
java-version: '21'
|
||||||
|
|
||||||
|
# - name: Run build pkg directory
|
||||||
|
# run: |
|
||||||
|
# go run generator/main.go build
|
||||||
|
|
||||||
|
- name: Set up s3cfg
|
||||||
|
run: |
|
||||||
|
cat <<'EOF' >> ~/.s3cfg
|
||||||
|
[default]
|
||||||
|
host_base = https://object.storage.eu01.onstackit.cloud
|
||||||
|
host_bucket = https://%(bucket).object.storage.eu01.onstackit.cloud
|
||||||
|
check_ssl_certificate = False
|
||||||
|
access_key = ${{ secrets.S3_ACCESS_KEY }}
|
||||||
|
secret_key = ${{ secrets.S3_SECRET_KEY }}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
- name: Import GPG key
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.PRIVATE_KEY_PEM }}" > ~/private.key.pem
|
||||||
|
gpg --import ~/private.key.pem
|
||||||
|
rm ~/private.key.pem
|
||||||
|
|
||||||
|
- name: Run GoReleaser with SNAPSHOT
|
||||||
|
id: goreleaser
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
||||||
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
|
uses: goreleaser/goreleaser-action@v7
|
||||||
|
with:
|
||||||
|
args: release --skip publish --clean --snapshot
|
||||||
|
|
||||||
|
- name: Prepare key file
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.PUBLIC_KEY_PEM }}" >public_key.pem
|
||||||
|
|
||||||
|
- name: Prepare provider directory structure
|
||||||
|
run: |
|
||||||
|
VERSION=$(jq -r .version < dist/metadata.json)
|
||||||
|
go run generator/main.go \
|
||||||
|
publish \
|
||||||
|
--namespace=mhenselin \
|
||||||
|
--providerName=stackitprivatepreview \
|
||||||
|
--repoName=terraform-provider-stackitprivatepreview \
|
||||||
|
--domain=tfregistry.sysops.stackit.rocks \
|
||||||
|
--gpgFingerprint="${{ secrets.GPG_FINGERPRINT }}" \
|
||||||
|
--gpgPubKeyFile=public_key.pem \
|
||||||
|
--version=${VERSION}
|
||||||
|
|
||||||
|
testing:
|
||||||
|
name: CI run tests
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- config
|
||||||
|
- prepare
|
||||||
|
env:
|
||||||
|
TF_ACC_PROJECT_ID: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
|
TF_ACC_ORGANIZATION_ID: ${{ vars.TF_ACC_ORGANIZATION_ID }}
|
||||||
|
TF_ACC_REGION: ${{ vars.TF_ACC_REGION }}
|
||||||
|
TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL: ${{ vars.TF_ACC_TEST_PROJECT_SERVICE_ACCOUNT_EMAIL }}
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
uses: ./.github/actions/build
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
|
||||||
|
- name: Setup Terraform
|
||||||
|
uses: hashicorp/setup-terraform@v2
|
||||||
|
with:
|
||||||
|
terraform_wrapper: false
|
||||||
|
|
||||||
|
- name: Create service account json file
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: |
|
||||||
|
echo "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}" >~/.service_account.json
|
||||||
|
|
||||||
|
- name: Run go mod tidy
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
|
- name: Testing
|
||||||
|
run: |
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
||||||
|
export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
|
make test
|
||||||
|
|
||||||
|
# - name: Acceptance Testing
|
||||||
|
# env:
|
||||||
|
# TF_ACC: "1"
|
||||||
|
# if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
# run: |
|
||||||
|
# TF_ACC_SERVICE_ACCOUNT_FILE=~/.service_account.json
|
||||||
|
# export TF_ACC_SERVICE_ACCOUNT_FILE
|
||||||
|
# make test-acceptance-tf
|
||||||
|
|
||||||
|
- name: Run Test
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
uses: ./.github/actions/acc_test
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
|
region: ${{ vars.TF_ACC_REGION }}
|
||||||
|
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
||||||
|
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
|
||||||
|
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
|
||||||
|
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
||||||
|
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
||||||
|
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
||||||
|
# service_account_json_file_path: "~/service_account.json"
|
||||||
|
|
||||||
|
- name: Check coverage threshold
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
make coverage
|
||||||
|
COVERAGE=$(go tool cover -func=coverage.out | grep total | awk '{print $3}' | sed 's/%//')
|
||||||
|
echo "Coverage: $COVERAGE%"
|
||||||
|
if (( $(echo "$COVERAGE < 80" | bc -l) )); then
|
||||||
|
echo "Coverage is below 80%"
|
||||||
|
# exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Archive code coverage results
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
||||||
|
path: "stackit/${{ env.CODE_COVERAGE_FILE_NAME }}"
|
||||||
|
|
||||||
|
main:
|
||||||
|
if: ${{ github.event_name != 'schedule' }}
|
||||||
|
name: CI run build and linting
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- config
|
||||||
|
- prepare
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
# - uses: actions/cache@v5
|
||||||
|
# id: cache
|
||||||
|
# with:
|
||||||
|
# path: path/to/dependencies
|
||||||
|
# key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||||
|
|
||||||
|
# - name: Install Dependencies
|
||||||
|
# if: steps.cache.outputs.cache-hit != 'true'
|
||||||
|
# run: /install.sh
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
uses: ./.github/actions/build
|
||||||
|
with:
|
||||||
|
go-version: ${{ env.GO_VERSION }}
|
||||||
|
|
||||||
|
- name: Setup Terraform
|
||||||
|
uses: hashicorp/setup-terraform@v2
|
||||||
|
with:
|
||||||
|
terraform_wrapper: false
|
||||||
|
|
||||||
|
- name: "Ensure docs are up-to-date"
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: ./scripts/check-docs.sh
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: "Run go mod tidy"
|
||||||
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
|
run: go mod tidy
|
||||||
|
|
||||||
|
- name: golangci-lint
|
||||||
|
uses: golangci/golangci-lint-action@v9
|
||||||
|
with:
|
||||||
|
version: v2.10
|
||||||
|
args: --config=.golang-ci.yaml --allow-parallel-runners --timeout=5m
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Linting terraform files
|
||||||
|
run: make lint-tf
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
code_coverage:
|
||||||
|
name: "Code coverage report"
|
||||||
|
if: github.event_name == 'pull_request' # Do not run when workflow is triggered by push to main branch
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs:
|
||||||
|
- main
|
||||||
|
- prepare
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
actions: read # to download code coverage results from "main" job
|
||||||
|
pull-requests: write # write permission needed to comment on PR
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install sudo
|
||||||
|
|
||||||
|
- name: Check new code coverage
|
||||||
|
uses: fgrosse/go-coverage-report@v1.2.0
|
||||||
|
continue-on-error: true # Add this line to prevent pipeline failures in forks
|
||||||
|
with:
|
||||||
|
coverage-artifact-name: ${{ env.CODE_COVERAGE_ARTIFACT_NAME }}
|
||||||
|
coverage-file-name: ${{ env.CODE_COVERAGE_FILE_NAME }}
|
||||||
|
root-package: 'github.com/stackitcloud/terraform-provider-stackit'
|
||||||
32
.github/workflows/publish.yaml
vendored
32
.github/workflows/publish.yaml
vendored
|
|
@ -23,7 +23,7 @@ jobs:
|
||||||
uses: actions/checkout@v6
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Check GoReleaser
|
- name: Check GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: check
|
args: check
|
||||||
|
|
||||||
|
|
@ -43,10 +43,15 @@ jobs:
|
||||||
apt-get -y -qq update
|
apt-get -y -qq update
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v6
|
uses: actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version: ${{ env.GO_VERSION }}
|
# go-version: ${{ env.GO_VERSION }}
|
||||||
|
check-latest: true
|
||||||
|
go-version-file: 'go.mod'
|
||||||
|
|
||||||
- name: Install go tools
|
- name: Install go tools
|
||||||
run: |
|
run: |
|
||||||
|
|
@ -60,16 +65,6 @@ jobs:
|
||||||
distribution: 'temurin' # See 'Supported distributions' for available options
|
distribution: 'temurin' # See 'Supported distributions' for available options
|
||||||
java-version: '21'
|
java-version: '21'
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run build pkg directory
|
|
||||||
run: |
|
|
||||||
set -e
|
|
||||||
mkdir -p generated/services
|
|
||||||
mkdir -p generated/internal/services
|
|
||||||
go run cmd/main.go build
|
|
||||||
|
|
||||||
- name: Set up s3cfg
|
- name: Set up s3cfg
|
||||||
run: |
|
run: |
|
||||||
cat <<'EOF' >> ~/.s3cfg
|
cat <<'EOF' >> ~/.s3cfg
|
||||||
|
|
@ -93,7 +88,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
||||||
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --skip publish --clean --snapshot
|
args: release --skip publish --clean --snapshot
|
||||||
|
|
||||||
|
|
@ -103,7 +98,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
GITHUB_TOKEN: ${{ env.FORGEJO_TOKEN }}
|
||||||
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
GPG_FINGERPRINT: ${{ secrets.GPG_FINGERPRINT }}
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --skip publish --clean
|
args: release --skip publish --clean
|
||||||
|
|
||||||
|
|
@ -114,7 +109,7 @@ jobs:
|
||||||
- name: Prepare provider directory structure
|
- name: Prepare provider directory structure
|
||||||
run: |
|
run: |
|
||||||
VERSION=$(jq -r .version < dist/metadata.json)
|
VERSION=$(jq -r .version < dist/metadata.json)
|
||||||
go run cmd/main.go \
|
go run generator/main.go \
|
||||||
publish \
|
publish \
|
||||||
--namespace=mhenselin \
|
--namespace=mhenselin \
|
||||||
--providerName=stackitprivatepreview \
|
--providerName=stackitprivatepreview \
|
||||||
|
|
@ -124,6 +119,12 @@ jobs:
|
||||||
--gpgPubKeyFile=public_key.pem \
|
--gpgPubKeyFile=public_key.pem \
|
||||||
--version=${VERSION}
|
--version=${VERSION}
|
||||||
|
|
||||||
|
- name: Prepare documentation nav file
|
||||||
|
run: |
|
||||||
|
go run generator/main.go \
|
||||||
|
docs \
|
||||||
|
--outFile nav.md
|
||||||
|
|
||||||
- name: Publish provider to S3
|
- name: Publish provider to S3
|
||||||
run: |
|
run: |
|
||||||
set -e
|
set -e
|
||||||
|
|
@ -143,3 +144,4 @@ jobs:
|
||||||
ssh -o StrictHostKeyChecking=no ubuntu@${{ vars.DOCS_SERVER_IP }} 'rm -rf /srv/www/docs'
|
ssh -o StrictHostKeyChecking=no ubuntu@${{ vars.DOCS_SERVER_IP }} 'rm -rf /srv/www/docs'
|
||||||
echo "${{ github.ref_name }}" >docs/_version.txt
|
echo "${{ github.ref_name }}" >docs/_version.txt
|
||||||
scp -o StrictHostKeyChecking=no -r docs ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
|
scp -o StrictHostKeyChecking=no -r docs ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
|
||||||
|
scp -o StrictHostKeyChecking=no nav.md ubuntu@${{ vars.DOCS_SERVER_IP }}:/srv/www/
|
||||||
|
|
|
||||||
6
.github/workflows/release.yaml
vendored
6
.github/workflows/release.yaml
vendored
|
|
@ -22,17 +22,19 @@ jobs:
|
||||||
with:
|
with:
|
||||||
# Allow goreleaser to access older tag information.
|
# Allow goreleaser to access older tag information.
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-go@v5
|
|
||||||
|
- uses: https://code.forgejo.org/actions/setup-go@v6
|
||||||
with:
|
with:
|
||||||
go-version-file: "go.mod"
|
go-version-file: "go.mod"
|
||||||
cache: true
|
cache: true
|
||||||
|
|
||||||
- name: Import GPG key
|
- name: Import GPG key
|
||||||
uses: crazy-max/ghaction-import-gpg@v6
|
uses: crazy-max/ghaction-import-gpg@v6
|
||||||
id: import_gpg
|
id: import_gpg
|
||||||
with:
|
with:
|
||||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||||
- name: Run GoReleaser
|
- name: Run GoReleaser
|
||||||
uses: goreleaser/goreleaser-action@v6
|
uses: goreleaser/goreleaser-action@v7
|
||||||
with:
|
with:
|
||||||
args: release --clean
|
args: release --clean
|
||||||
env:
|
env:
|
||||||
|
|
|
||||||
29
.github/workflows/runnerstats.yaml
vendored
Normal file
29
.github/workflows/runnerstats.yaml
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
name: Runner stats
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
stats-own:
|
||||||
|
name: "Get own runner stats"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
run: |
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install inxi
|
||||||
|
|
||||||
|
- name: Show stats
|
||||||
|
run: inxi -c 0
|
||||||
|
|
||||||
|
stats-stackit:
|
||||||
|
name: "Get STACKIT runner stats"
|
||||||
|
runs-on: stackit-docker
|
||||||
|
steps:
|
||||||
|
- name: Install needed tools
|
||||||
|
run: |
|
||||||
|
apt-get -y -qq update
|
||||||
|
apt-get -y -qq install inxi
|
||||||
|
|
||||||
|
- name: Show stats
|
||||||
|
run: inxi -c 0
|
||||||
10
.github/workflows/tf-acc-test.yaml
vendored
10
.github/workflows/tf-acc-test.yaml
vendored
|
|
@ -18,6 +18,12 @@ jobs:
|
||||||
uses: ./.github/actions/acc_test
|
uses: ./.github/actions/acc_test
|
||||||
with:
|
with:
|
||||||
go-version: ${{ env.GO_VERSION }}
|
go-version: ${{ env.GO_VERSION }}
|
||||||
project_id: ${{ vars.TEST_PROJECT_ID }}
|
project_id: ${{ vars.TF_ACC_PROJECT_ID }}
|
||||||
region: 'eu01'
|
region: 'eu01'
|
||||||
service_account_json: ${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON }}
|
service_account_json_content_b64: "${{ secrets.TF_ACC_SERVICE_ACCOUNT_JSON_B64 }}"
|
||||||
|
project_user_email: ${{ vars.TEST_PROJECT_USER_EMAIL }}
|
||||||
|
tf_acc_kek_key_id: ${{ vars.TF_ACC_KEK_KEY_ID }}
|
||||||
|
tf_acc_kek_key_ring_id: ${{ vars.TF_ACC_KEK_KEY_RING_ID }}
|
||||||
|
tf_acc_kek_key_version: ${{ vars.TF_ACC_KEK_KEY_VERSION }}
|
||||||
|
tf_acc_kek_service_account: ${{ vars.TF_ACC_KEK_SERVICE_ACCOUNT }}
|
||||||
|
# service_account_json_file_path: "~/service_account.json"
|
||||||
|
|
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -40,6 +40,7 @@ coverage.out
|
||||||
coverage.html
|
coverage.html
|
||||||
generated
|
generated
|
||||||
stackit-sdk-generator
|
stackit-sdk-generator
|
||||||
|
stackit-sdk-generator/**
|
||||||
dist
|
dist
|
||||||
|
|
||||||
.secrets
|
.secrets
|
||||||
|
|
|
||||||
94
.golang-ci.yaml
Normal file
94
.golang-ci.yaml
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
version: "2"
|
||||||
|
run:
|
||||||
|
concurrency: 4
|
||||||
|
output:
|
||||||
|
formats:
|
||||||
|
text:
|
||||||
|
print-linter-name: true
|
||||||
|
print-issued-lines: true
|
||||||
|
colors: true
|
||||||
|
path: stdout
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- bodyclose
|
||||||
|
- depguard
|
||||||
|
- errorlint
|
||||||
|
- forcetypeassert
|
||||||
|
- gochecknoinits
|
||||||
|
- gocritic
|
||||||
|
- gosec
|
||||||
|
- misspell
|
||||||
|
- nakedret
|
||||||
|
- revive
|
||||||
|
- sqlclosecheck
|
||||||
|
- wastedassign
|
||||||
|
disable:
|
||||||
|
- noctx
|
||||||
|
- unparam
|
||||||
|
settings:
|
||||||
|
depguard:
|
||||||
|
rules:
|
||||||
|
main:
|
||||||
|
list-mode: lax
|
||||||
|
allow:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
|
- github.com/hashicorp/terraform-plugin-framework
|
||||||
|
- github.com/hashicorp/terraform-plugin-log
|
||||||
|
- github.com/stackitcloud/stackit-sdk-go
|
||||||
|
deny:
|
||||||
|
- pkg: github.com/stretchr/testify
|
||||||
|
desc: Do not use a testing framework
|
||||||
|
gocritic:
|
||||||
|
disabled-checks:
|
||||||
|
- wrapperFunc
|
||||||
|
- typeDefFirst
|
||||||
|
- ifElseChain
|
||||||
|
- dupImport
|
||||||
|
- hugeParam
|
||||||
|
enabled-tags:
|
||||||
|
- performance
|
||||||
|
- style
|
||||||
|
- experimental
|
||||||
|
gosec:
|
||||||
|
excludes:
|
||||||
|
- G104
|
||||||
|
- G102
|
||||||
|
- G304
|
||||||
|
- G307
|
||||||
|
misspell:
|
||||||
|
locale: US
|
||||||
|
nakedret:
|
||||||
|
max-func-lines: 0
|
||||||
|
revive:
|
||||||
|
severity: error
|
||||||
|
rules:
|
||||||
|
- name: errorf
|
||||||
|
- name: context-as-argument
|
||||||
|
- name: error-return
|
||||||
|
- name: increment-decrement
|
||||||
|
- name: indent-error-flow
|
||||||
|
- name: superfluous-else
|
||||||
|
- name: unused-parameter
|
||||||
|
- name: unreachable-code
|
||||||
|
- name: atomic
|
||||||
|
- name: empty-lines
|
||||||
|
- name: early-return
|
||||||
|
exclusions:
|
||||||
|
paths:
|
||||||
|
- generator/
|
||||||
|
generated: lax
|
||||||
|
warn-unused: true
|
||||||
|
# Excluding configuration per-path, per-linter, per-text and per-source.
|
||||||
|
rules:
|
||||||
|
# Exclude some linters from running on tests files.
|
||||||
|
- path: _test\.go
|
||||||
|
linters:
|
||||||
|
- gochecknoinits
|
||||||
|
formatters:
|
||||||
|
enable:
|
||||||
|
#- gofmt
|
||||||
|
- goimports
|
||||||
|
settings:
|
||||||
|
goimports:
|
||||||
|
local-prefixes:
|
||||||
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
7
Makefile
7
Makefile
|
|
@ -12,17 +12,20 @@ project-tools:
|
||||||
# LINT
|
# LINT
|
||||||
lint-golangci-lint:
|
lint-golangci-lint:
|
||||||
@echo "Linting with golangci-lint"
|
@echo "Linting with golangci-lint"
|
||||||
@$(SCRIPTS_BASE)/lint-golangci-lint.sh
|
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config .golang-ci.yaml
|
||||||
|
|
||||||
|
|
||||||
lint-tf:
|
lint-tf:
|
||||||
@echo "Linting terraform files"
|
@echo "Linting terraform files"
|
||||||
@terraform fmt -check -diff -recursive
|
@terraform fmt -check -diff -recursive examples/
|
||||||
|
@terraform fmt -check -diff -recursive stackit/
|
||||||
|
|
||||||
lint: lint-golangci-lint lint-tf
|
lint: lint-golangci-lint lint-tf
|
||||||
|
|
||||||
# DOCUMENTATION GENERATION
|
# DOCUMENTATION GENERATION
|
||||||
generate-docs:
|
generate-docs:
|
||||||
@echo "Generating documentation with tfplugindocs"
|
@echo "Generating documentation with tfplugindocs"
|
||||||
|
|
||||||
@$(SCRIPTS_BASE)/tfplugindocs.sh
|
@$(SCRIPTS_BASE)/tfplugindocs.sh
|
||||||
|
|
||||||
build:
|
build:
|
||||||
|
|
|
||||||
|
|
@ -1,956 +0,0 @@
|
||||||
package build
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"io"
|
|
||||||
"log"
|
|
||||||
"log/slog"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path"
|
|
||||||
"path/filepath"
|
|
||||||
"regexp"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"text/template"
|
|
||||||
|
|
||||||
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
|
|
||||||
"github.com/ldez/go-git-cmd-wrapper/v2/git"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
OAS_REPO_NAME = "stackit-api-specifications"
|
|
||||||
OAS_REPO = "https://github.com/stackitcloud/stackit-api-specifications.git"
|
|
||||||
GEN_REPO_NAME = "stackit-sdk-generator"
|
|
||||||
GEN_REPO = "https://github.com/stackitcloud/stackit-sdk-generator.git"
|
|
||||||
)
|
|
||||||
|
|
||||||
type version struct {
|
|
||||||
verString string
|
|
||||||
major int
|
|
||||||
minor int
|
|
||||||
}
|
|
||||||
|
|
||||||
type Builder struct {
|
|
||||||
SkipClone bool
|
|
||||||
SkipCleanup bool
|
|
||||||
PackagesOnly bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Builder) Build() error {
|
|
||||||
slog.Info("Starting Builder")
|
|
||||||
if b.PackagesOnly {
|
|
||||||
slog.Info(" >>> only generating pkg_gen <<<")
|
|
||||||
}
|
|
||||||
|
|
||||||
root, err := getRoot()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
if root == nil || *root == "" {
|
|
||||||
return fmt.Errorf("unable to determine root directory from git")
|
|
||||||
}
|
|
||||||
slog.Info(" ... using root directory", "dir", *root)
|
|
||||||
|
|
||||||
if !b.PackagesOnly {
|
|
||||||
slog.Info(" ... Checking needed commands available")
|
|
||||||
err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !b.SkipCleanup {
|
|
||||||
slog.Info("Cleaning up old packages directory")
|
|
||||||
err = os.RemoveAll(path.Join(*root, "pkg_gen"))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !b.SkipCleanup && !b.PackagesOnly {
|
|
||||||
slog.Info("Cleaning up old packages directory")
|
|
||||||
err = os.RemoveAll(path.Join(*root, "pkg_gen"))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
|
|
||||||
genDir := path.Join(*root, GEN_REPO_NAME)
|
|
||||||
if !b.SkipClone {
|
|
||||||
err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Creating oas repo dir", "dir", fmt.Sprintf("%s/%s", *root, OAS_REPO_NAME))
|
|
||||||
repoDir, err := createRepoDir(genDir, OAS_REPO, OAS_REPO_NAME, b.SkipClone)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("%s", err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Retrieving versions from subdirs")
|
|
||||||
// TODO - major
|
|
||||||
verMap, err := getVersions(repoDir)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("%s", err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Reducing to only latest or highest")
|
|
||||||
res, err := getOnlyLatest(verMap)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("%s", err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Creating OAS dir")
|
|
||||||
err = os.MkdirAll(path.Join(genDir, "oas"), 0755)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Copying OAS files")
|
|
||||||
for service, item := range res {
|
|
||||||
baseService := strings.TrimSuffix(service, "alpha")
|
|
||||||
baseService = strings.TrimSuffix(baseService, "beta")
|
|
||||||
itemVersion := fmt.Sprintf("v%d%s", item.major, item.verString)
|
|
||||||
if item.minor != 0 {
|
|
||||||
itemVersion = itemVersion + "" + strconv.Itoa(item.minor)
|
|
||||||
}
|
|
||||||
srcFile := path.Join(
|
|
||||||
repoDir,
|
|
||||||
"services",
|
|
||||||
baseService,
|
|
||||||
itemVersion,
|
|
||||||
fmt.Sprintf("%s.json", baseService),
|
|
||||||
)
|
|
||||||
dstFile := path.Join(genDir, "oas", fmt.Sprintf("%s.json", service))
|
|
||||||
_, err = copyFile(srcFile, dstFile)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("%s", err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Changing dir", "dir", genDir)
|
|
||||||
err = os.Chdir(genDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Calling make", "command", "generate-go-sdk")
|
|
||||||
cmd := exec.Command("make", "generate-go-sdk")
|
|
||||||
var stdOut, stdErr bytes.Buffer
|
|
||||||
cmd.Stdout = &stdOut
|
|
||||||
cmd.Stderr = &stdErr
|
|
||||||
|
|
||||||
if err = cmd.Start(); err != nil {
|
|
||||||
slog.Error("cmd.Start", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = cmd.Wait(); err != nil {
|
|
||||||
var exitErr *exec.ExitError
|
|
||||||
if errors.As(err, &exitErr) {
|
|
||||||
slog.Error("cmd.Wait", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("cmd.Wait", "err", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Cleaning up go.mod and go.sum files")
|
|
||||||
cleanDir := path.Join(genDir, "sdk-repo-updated", "services")
|
|
||||||
dirEntries, err := os.ReadDir(cleanDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, entry := range dirEntries {
|
|
||||||
if entry.IsDir() {
|
|
||||||
err = deleteFiles(
|
|
||||||
path.Join(cleanDir, entry.Name(), "go.mod"),
|
|
||||||
path.Join(cleanDir, entry.Name(), "go.sum"),
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Changing dir", "dir", *root)
|
|
||||||
err = os.Chdir(*root)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Rearranging package directories")
|
|
||||||
err = os.MkdirAll(path.Join(*root, "pkg_gen"), 0755) // noqa:gosec
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
srcDir := path.Join(genDir, "sdk-repo-updated", "services")
|
|
||||||
items, err := os.ReadDir(srcDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, item := range items {
|
|
||||||
if item.IsDir() {
|
|
||||||
slog.Info(" -> package", "name", item.Name())
|
|
||||||
tgtDir := path.Join(*root, "pkg_gen", item.Name())
|
|
||||||
if fileExists(tgtDir) {
|
|
||||||
delErr := os.RemoveAll(tgtDir)
|
|
||||||
if delErr != nil {
|
|
||||||
return delErr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
err = os.Rename(path.Join(srcDir, item.Name()), tgtDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !b.PackagesOnly {
|
|
||||||
slog.Info("Generating service boilerplate")
|
|
||||||
err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Copying all service files")
|
|
||||||
err = CopyDirectory(
|
|
||||||
path.Join(*root, "generated", "internal", "services"),
|
|
||||||
path.Join(*root, "stackit", "internal", "services"),
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !b.SkipCleanup {
|
|
||||||
slog.Info("Finally removing temporary files and directories")
|
|
||||||
err = os.RemoveAll(path.Join(*root, "generated"))
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("RemoveAll", "dir", path.Join(*root, "generated"), "err", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = os.RemoveAll(path.Join(*root, GEN_REPO_NAME))
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("RemoveAll", "dir", path.Join(*root, GEN_REPO_NAME), "err", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Cleaning up", "dir", repoDir)
|
|
||||||
err = os.RemoveAll(filepath.Dir(repoDir))
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("%s", err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slog.Info("Done")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type templateData struct {
|
|
||||||
PackageName string
|
|
||||||
PackageNameCamel string
|
|
||||||
PackageNamePascal string
|
|
||||||
NameCamel string
|
|
||||||
NamePascal string
|
|
||||||
NameSnake string
|
|
||||||
Fields []string
|
|
||||||
}
|
|
||||||
|
|
||||||
func fileExists(path string) bool {
|
|
||||||
_, err := os.Stat(path)
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func createBoilerplate(rootFolder, folder string) error {
|
|
||||||
services, err := os.ReadDir(folder)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, svc := range services {
|
|
||||||
if !svc.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
resources, err := os.ReadDir(path.Join(folder, svc.Name()))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
var handleDS bool
|
|
||||||
var handleRes bool
|
|
||||||
var foundDS bool
|
|
||||||
var foundRes bool
|
|
||||||
|
|
||||||
for _, res := range resources {
|
|
||||||
if !res.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
resourceName := res.Name()
|
|
||||||
|
|
||||||
dsFile := path.Join(folder, svc.Name(), res.Name(), "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", res.Name()))
|
|
||||||
handleDS = fileExists(dsFile)
|
|
||||||
|
|
||||||
resFile := path.Join(folder, svc.Name(), res.Name(), "resources_gen", fmt.Sprintf("%s_resource_gen.go", res.Name()))
|
|
||||||
handleRes = fileExists(resFile)
|
|
||||||
|
|
||||||
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
|
|
||||||
foundDS = fileExists(dsGoFile)
|
|
||||||
|
|
||||||
resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
|
|
||||||
foundRes = fileExists(resGoFile)
|
|
||||||
|
|
||||||
if handleDS && !foundDS {
|
|
||||||
slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
|
|
||||||
if !ValidateSnakeCase(resourceName) {
|
|
||||||
return errors.New("resource name is invalid")
|
|
||||||
}
|
|
||||||
|
|
||||||
fields, tokenErr := getTokens(dsFile)
|
|
||||||
if tokenErr != nil {
|
|
||||||
return fmt.Errorf("error reading tokens: %w", tokenErr)
|
|
||||||
}
|
|
||||||
|
|
||||||
tplName := "data_source_scaffold.gotmpl"
|
|
||||||
err = writeTemplateToFile(
|
|
||||||
tplName,
|
|
||||||
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
|
|
||||||
dsGoFile,
|
|
||||||
&templateData{
|
|
||||||
PackageName: svc.Name(),
|
|
||||||
PackageNameCamel: ToCamelCase(svc.Name()),
|
|
||||||
PackageNamePascal: ToPascalCase(svc.Name()),
|
|
||||||
NameCamel: ToCamelCase(resourceName),
|
|
||||||
NamePascal: ToPascalCase(resourceName),
|
|
||||||
NameSnake: resourceName,
|
|
||||||
Fields: fields,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if handleRes && !foundRes {
|
|
||||||
slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
|
|
||||||
if !ValidateSnakeCase(resourceName) {
|
|
||||||
return errors.New("resource name is invalid")
|
|
||||||
}
|
|
||||||
|
|
||||||
fields, tokenErr := getTokens(resFile)
|
|
||||||
if tokenErr != nil {
|
|
||||||
return fmt.Errorf("error reading tokens: %w", tokenErr)
|
|
||||||
}
|
|
||||||
|
|
||||||
tplName := "resource_scaffold.gotmpl"
|
|
||||||
err = writeTemplateToFile(
|
|
||||||
tplName,
|
|
||||||
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
|
|
||||||
resGoFile,
|
|
||||||
&templateData{
|
|
||||||
PackageName: svc.Name(),
|
|
||||||
PackageNameCamel: ToCamelCase(svc.Name()),
|
|
||||||
PackageNamePascal: ToPascalCase(svc.Name()),
|
|
||||||
NameCamel: ToCamelCase(resourceName),
|
|
||||||
NamePascal: ToPascalCase(resourceName),
|
|
||||||
NameSnake: resourceName,
|
|
||||||
Fields: fields,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !fileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
|
|
||||||
slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
|
|
||||||
if !ValidateSnakeCase(resourceName) {
|
|
||||||
return errors.New("resource name is invalid")
|
|
||||||
}
|
|
||||||
fncTplName := "functions_scaffold.gotmpl"
|
|
||||||
err = writeTemplateToFile(
|
|
||||||
fncTplName,
|
|
||||||
path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
|
|
||||||
path.Join(folder, svc.Name(), res.Name(), "functions.go"),
|
|
||||||
&templateData{
|
|
||||||
PackageName: svc.Name(),
|
|
||||||
PackageNameCamel: ToCamelCase(svc.Name()),
|
|
||||||
PackageNamePascal: ToPascalCase(svc.Name()),
|
|
||||||
NameCamel: ToCamelCase(resourceName),
|
|
||||||
NamePascal: ToPascalCase(resourceName),
|
|
||||||
NameSnake: resourceName,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func ucfirst(s string) string {
|
|
||||||
if len(s) == 0 {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return strings.ToUpper(s[:1]) + s[1:]
|
|
||||||
}
|
|
||||||
|
|
||||||
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
|
|
||||||
fn := template.FuncMap{
|
|
||||||
"ucfirst": ucfirst,
|
|
||||||
}
|
|
||||||
|
|
||||||
tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
var f *os.File
|
|
||||||
f, err = os.Create(outFile)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = tmpl.Execute(f, *data)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = f.Close()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func generateServiceFiles(rootDir, generatorDir string) error {
|
|
||||||
// slog.Info("Generating specs folder")
|
|
||||||
err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
services, err := os.ReadDir(path.Join(rootDir, "service_specs"))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, service := range services {
|
|
||||||
if !service.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
versions, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name()))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, svcVersion := range versions {
|
|
||||||
if !svcVersion.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: use const of supported versions
|
|
||||||
if svcVersion.Name() != "alpha" && svcVersion.Name() != "beta" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
specFiles, err := os.ReadDir(path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name()))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, specFile := range specFiles {
|
|
||||||
if specFile.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// slog.Info("Checking spec", "name", spec.Name())
|
|
||||||
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
|
||||||
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
|
||||||
if matches != nil {
|
|
||||||
fileName := matches[0][0]
|
|
||||||
resource := matches[0][1]
|
|
||||||
slog.Info(
|
|
||||||
" found service spec",
|
|
||||||
"name",
|
|
||||||
specFile.Name(),
|
|
||||||
"service",
|
|
||||||
service.Name(),
|
|
||||||
"resource",
|
|
||||||
resource,
|
|
||||||
)
|
|
||||||
|
|
||||||
oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()))
|
|
||||||
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
|
|
||||||
slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name())
|
|
||||||
scName = strings.ReplaceAll(scName, "-", "")
|
|
||||||
err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// slog.Info("Generating openapi spec json")
|
|
||||||
specJsonFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource))
|
|
||||||
|
|
||||||
var stdOut, stdErr bytes.Buffer
|
|
||||||
|
|
||||||
// noqa:gosec
|
|
||||||
cmd := exec.Command(
|
|
||||||
"tfplugingen-openapi",
|
|
||||||
"generate",
|
|
||||||
"--config",
|
|
||||||
path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
|
|
||||||
"--output",
|
|
||||||
specJsonFile,
|
|
||||||
oasFile,
|
|
||||||
)
|
|
||||||
cmd.Stdout = &stdOut
|
|
||||||
cmd.Stderr = &stdErr
|
|
||||||
|
|
||||||
if err = cmd.Start(); err != nil {
|
|
||||||
slog.Error(
|
|
||||||
"tfplugingen-openapi generate",
|
|
||||||
"error",
|
|
||||||
err,
|
|
||||||
"stdOut",
|
|
||||||
stdOut.String(),
|
|
||||||
"stdErr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = cmd.Wait(); err != nil {
|
|
||||||
var exitErr *exec.ExitError
|
|
||||||
if errors.As(err, &exitErr) {
|
|
||||||
slog.Error("tfplugingen-openapi generate", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("tfplugingen-openapi generate", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if stdOut.Len() > 0 {
|
|
||||||
slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
// slog.Info("Creating terraform svc resource files folder")
|
|
||||||
tgtFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen")
|
|
||||||
err = os.MkdirAll(tgtFolder, 0755)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// slog.Info("Generating terraform svc resource files")
|
|
||||||
|
|
||||||
// noqa:gosec
|
|
||||||
cmd2 := exec.Command(
|
|
||||||
"tfplugingen-framework",
|
|
||||||
"generate",
|
|
||||||
"resources",
|
|
||||||
"--input",
|
|
||||||
specJsonFile,
|
|
||||||
"--output",
|
|
||||||
tgtFolder,
|
|
||||||
"--package",
|
|
||||||
scName,
|
|
||||||
)
|
|
||||||
|
|
||||||
cmd2.Stdout = &stdOut
|
|
||||||
cmd2.Stderr = &stdErr
|
|
||||||
if err = cmd2.Start(); err != nil {
|
|
||||||
slog.Error("tfplugingen-framework generate resources", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = cmd2.Wait(); err != nil {
|
|
||||||
var exitErr *exec.ExitError
|
|
||||||
if errors.As(err, &exitErr) {
|
|
||||||
slog.Error("tfplugingen-framework generate resources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("tfplugingen-framework generate resources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// slog.Info("Creating terraform svc datasource files folder")
|
|
||||||
tgtFolder = path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen")
|
|
||||||
err = os.MkdirAll(tgtFolder, 0755)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// slog.Info("Generating terraform svc resource files")
|
|
||||||
|
|
||||||
// noqa:gosec
|
|
||||||
cmd3 := exec.Command(
|
|
||||||
"tfplugingen-framework",
|
|
||||||
"generate",
|
|
||||||
"data-sources",
|
|
||||||
"--input",
|
|
||||||
specJsonFile,
|
|
||||||
"--output",
|
|
||||||
tgtFolder,
|
|
||||||
"--package",
|
|
||||||
scName,
|
|
||||||
)
|
|
||||||
var stdOut3, stdErr3 bytes.Buffer
|
|
||||||
cmd3.Stdout = &stdOut3
|
|
||||||
cmd3.Stderr = &stdErr3
|
|
||||||
|
|
||||||
if err = cmd3.Start(); err != nil {
|
|
||||||
slog.Error("tfplugingen-framework generate data-sources", "error", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = cmd3.Wait(); err != nil {
|
|
||||||
var exitErr *exec.ExitError
|
|
||||||
if errors.As(err, &exitErr) {
|
|
||||||
slog.Error("tfplugingen-framework generate data-sources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("tfplugingen-framework generate data-sources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tfAnoErr := handleTfTagForDatasourceFile(
|
|
||||||
path.Join(tgtFolder, fmt.Sprintf("%s_data_source_gen.go", resource)),
|
|
||||||
scName,
|
|
||||||
resource,
|
|
||||||
)
|
|
||||||
if tfAnoErr != nil {
|
|
||||||
return tfAnoErr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// handleTfTagForDatasourceFile replaces existing "id" with "stf_original_api_id"
|
|
||||||
func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
|
||||||
slog.Info(" handle terraform tag for datasource", "service", service, "resource", resource)
|
|
||||||
if !fileExists(filePath) {
|
|
||||||
slog.Warn(" could not find file, skipping", "path", filePath)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
f, err := os.Open(filePath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
root, err := getRoot()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
tmp, err := os.CreateTemp(*root, "replace-*")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer tmp.Close()
|
|
||||||
|
|
||||||
sc := bufio.NewScanner(f)
|
|
||||||
for sc.Scan() {
|
|
||||||
resLine, err := handleLine(sc.Text())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if _, err := tmp.WriteString(resLine + "\n"); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if scErr := sc.Err(); scErr != nil {
|
|
||||||
return scErr
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := tmp.Close(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := f.Close(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := os.Rename(tmp.Name(), filePath); err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Compiled once at package level: handleLine runs for every line of every
// generated datasource file, and the original recompiled both patterns per call.
var (
	idSchemaRe = regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)
	idModelRe  = regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)
)

// handleLine rewrites a single generated source line, renaming the "id"
// attribute — both in the schema attribute map and in the model struct's
// tfsdk tag — to "tf_original_api_id". Non-matching lines are returned
// unchanged. The error return is always nil today; kept for interface
// stability with callers.
func handleLine(line string) (string, error) {
	if m := idSchemaRe.FindAllStringSubmatch(line, -1); m != nil {
		return fmt.Sprintf("%stf_original_api_id%s", m[0][1], m[0][3]), nil
	}

	if m := idModelRe.FindAllStringSubmatch(line, -1); m != nil {
		return fmt.Sprintf("%stf_original_api_id%s", m[0][1], m[0][3]), nil
	}

	return line, nil
}
|
|
||||||
|
|
||||||
func checkCommands(commands []string) error {
|
|
||||||
for _, commandName := range commands {
|
|
||||||
if !commandExists(commandName) {
|
|
||||||
return fmt.Errorf("missing command %s", commandName)
|
|
||||||
}
|
|
||||||
slog.Info(" found", "command", commandName)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// commandExists reports whether cmd resolves to an executable on the PATH.
func commandExists(cmd string) bool {
	if _, err := exec.LookPath(cmd); err != nil {
		return false
	}
	return true
}
|
|
||||||
|
|
||||||
func deleteFiles(fNames ...string) error {
|
|
||||||
for _, fName := range fNames {
|
|
||||||
if _, err := os.Stat(fName); !os.IsNotExist(err) {
|
|
||||||
err = os.Remove(fName)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func copyFile(src, dst string) (int64, error) {
|
|
||||||
sourceFileStat, err := os.Stat(src)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !sourceFileStat.Mode().IsRegular() {
|
|
||||||
return 0, fmt.Errorf("%s is not a regular file", src)
|
|
||||||
}
|
|
||||||
|
|
||||||
source, err := os.Open(src)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
defer source.Close()
|
|
||||||
|
|
||||||
destination, err := os.Create(dst)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
defer destination.Close()
|
|
||||||
nBytes, err := io.Copy(destination, source)
|
|
||||||
return nBytes, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// getOnlyLatest is intended to keep, per key, only the newest version.
//
// NOTE(review): as written, the input map's keys are already unique, so every
// key is visited exactly once, the `ok` lookup below is always false, and the
// else branch (the major/minor comparison) is unreachable — the function
// currently just copies m. Presumably the comparison was meant to run while
// the map was being built from duplicate-capable input; confirm the intent
// with the author. The error return is always nil.
func getOnlyLatest(m map[string]version) (map[string]version, error) {
	tmpMap := make(map[string]version)
	for k, v := range m {
		item, ok := tmpMap[k]
		if !ok {
			tmpMap[k] = v
		} else {
			// unreachable today — see NOTE(review) above
			if item.major == v.major && item.minor < v.minor {
				tmpMap[k] = v
			}
		}
	}
	return tmpMap, nil
}
|
|
||||||
|
|
||||||
func getVersions(dir string) (map[string]version, error) {
|
|
||||||
res := make(map[string]version)
|
|
||||||
children, err := os.ReadDir(path.Join(dir, "services"))
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, entry := range children {
|
|
||||||
if entry.IsDir() {
|
|
||||||
versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
m, err2 := extractVersions(entry.Name(), versions)
|
|
||||||
if err2 != nil {
|
|
||||||
return m, err2
|
|
||||||
}
|
|
||||||
for k, v := range m {
|
|
||||||
res[k] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return res, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
|
|
||||||
res := make(map[string]version)
|
|
||||||
for _, vDir := range versionDirs {
|
|
||||||
if vDir.IsDir() {
|
|
||||||
r := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`)
|
|
||||||
matches := r.FindAllStringSubmatch(vDir.Name(), -1)
|
|
||||||
if matches == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
svc, ver, err := handleVersion(service, matches[0])
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if svc != nil && ver != nil {
|
|
||||||
res[*svc] = *ver
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return res, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func handleVersion(service string, match []string) (*string, *version, error) {
|
|
||||||
if match == nil {
|
|
||||||
fmt.Println("no matches")
|
|
||||||
return nil, nil, nil
|
|
||||||
}
|
|
||||||
verString := match[2]
|
|
||||||
if verString != "alpha" && verString != "beta" {
|
|
||||||
return nil, nil, errors.New("unsupported version")
|
|
||||||
}
|
|
||||||
majVer, err := strconv.Atoi(match[1])
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
if match[3] == "" {
|
|
||||||
match[3] = "0"
|
|
||||||
}
|
|
||||||
minVer, err := strconv.Atoi(match[3])
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil, err
|
|
||||||
}
|
|
||||||
resStr := fmt.Sprintf("%s%s", service, verString)
|
|
||||||
return &resStr, &version{verString: verString, major: majVer, minor: minVer}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func createRepoDir(root, repoUrl, repoName string, skipClone bool) (string, error) {
|
|
||||||
targetDir := path.Join(root, repoName)
|
|
||||||
if !skipClone {
|
|
||||||
if fileExists(targetDir) {
|
|
||||||
slog.Warn("target dir exists - skipping", "targetDir", targetDir)
|
|
||||||
return targetDir, nil
|
|
||||||
}
|
|
||||||
_, err := git.Clone(
|
|
||||||
clone.Repository(repoUrl),
|
|
||||||
clone.Directory(targetDir),
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return targetDir, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
|
|
||||||
if !skipClone {
|
|
||||||
if fileExists(targetDir) {
|
|
||||||
remErr := os.RemoveAll(targetDir)
|
|
||||||
if remErr != nil {
|
|
||||||
return remErr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_, cloneErr := git.Clone(
|
|
||||||
clone.Repository(repoUrl),
|
|
||||||
clone.Directory(targetDir),
|
|
||||||
)
|
|
||||||
if cloneErr != nil {
|
|
||||||
return cloneErr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// getRoot returns the git repository top-level directory for the current
// working directory, as reported by `git rev-parse --show-toplevel`.
func getRoot() (*string, error) {
	out, err := exec.Command("git", "rev-parse", "--show-toplevel").Output()
	if err != nil {
		return nil, err
	}
	// git terminates the path with a newline; keep only the first line
	root, _, _ := strings.Cut(string(out), "\n")
	return &root, nil
}
|
|
||||||
|
|
||||||
func getTokens(fileName string) ([]string, error) {
|
|
||||||
fset := token.NewFileSet()
|
|
||||||
|
|
||||||
var result []string
|
|
||||||
|
|
||||||
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
ast.Inspect(node, func(n ast.Node) bool {
|
|
||||||
// Suche nach Typ-Deklarationen (structs)
|
|
||||||
ts, ok := n.(*ast.TypeSpec)
|
|
||||||
if ok {
|
|
||||||
if strings.Contains(ts.Name.Name, "Model") {
|
|
||||||
// fmt.Printf("found model: %s\n", ts.Name.Name)
|
|
||||||
ast.Inspect(ts, func(sn ast.Node) bool {
|
|
||||||
tts, tok := sn.(*ast.Field)
|
|
||||||
if tok {
|
|
||||||
// fmt.Printf(" found: %+v\n", tts.Names[0])
|
|
||||||
// spew.Dump(tts.Type)
|
|
||||||
|
|
||||||
result = append(result, tts.Names[0].String())
|
|
||||||
|
|
||||||
// fld, fldOk := tts.Type.(*ast.Ident)
|
|
||||||
//if fldOk {
|
|
||||||
// fmt.Printf("type: %+v\n", fld)
|
|
||||||
//}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
@ -31,8 +31,8 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
||||||
- `acl` (List of String) List of IPV4 cidr.
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
|
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
|
||||||
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
|
@ -52,10 +52,18 @@ data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
|
|
||||||
Read-Only:
|
Read-Only:
|
||||||
|
|
||||||
|
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info--write"></a>
|
||||||
|
### Nested Schema for `connection_info.write`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
- `host` (String) The host of the instance.
|
- `host` (String) The host of the instance.
|
||||||
- `port` (Number) The port of the instance.
|
- `port` (Number) The port of the instance.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<a id="nestedatt--encryption"></a>
|
<a id="nestedatt--encryption"></a>
|
||||||
### Nested Schema for `encryption`
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,54 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
|
|
||||||
project_id = var.project_id
|
|
||||||
region = var.region
|
|
||||||
cpu = 4
|
|
||||||
ram = 16
|
|
||||||
node_type = "Single"
|
|
||||||
storage_class = "premium-perf2-stackit"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `cpu` (Number) The cpu count of the instance.
|
|
||||||
- `node_type` (String) Defines the node type; it can be either Single or HA.
|
|
||||||
- `project_id` (String) The project ID of the flavor.
|
|
||||||
- `ram` (Number) The memory of the instance in Gibibyte.
|
|
||||||
- `region` (String) The region of the flavor.
|
|
||||||
- `storage_class` (String) The storage class of the instance.
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `description` (String) The flavor description.
|
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
|
||||||
- `id` (String) The id of the instance flavor.
|
|
||||||
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
|
||||||
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
|
||||||
- `storage_classes` (Attributes List) The storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
|
||||||
|
|
||||||
<a id="nestedatt--storage_classes"></a>
|
|
||||||
### Nested Schema for `storage_classes`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `class` (String)
|
|
||||||
- `max_io_per_sec` (Number)
|
|
||||||
- `max_through_in_mb` (Number)
|
|
||||||
|
|
@ -1,54 +0,0 @@
|
||||||
---
|
|
||||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
|
||||||
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
|
|
||||||
subcategory: ""
|
|
||||||
description: |-
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
```terraform
|
|
||||||
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
|
|
||||||
project_id = var.project_id
|
|
||||||
region = var.region
|
|
||||||
cpu = 4
|
|
||||||
ram = 16
|
|
||||||
node_type = "Single"
|
|
||||||
storage_class = "premium-perf2-stackit"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
|
||||||
## Schema
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
- `cpu` (Number) The cpu count of the instance.
|
|
||||||
- `node_type` (String) Defines the node type; it can be either Single or HA.
|
|
||||||
- `project_id` (String) The project ID of the flavor.
|
|
||||||
- `ram` (Number) The memory of the instance in Gibibyte.
|
|
||||||
- `region` (String) The region of the flavor.
|
|
||||||
- `storage_class` (String) The storage class of the instance.
|
|
||||||
|
|
||||||
### Read-Only
|
|
||||||
|
|
||||||
- `description` (String) The flavor description.
|
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
|
||||||
- `id` (String) The id of the instance flavor.
|
|
||||||
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
|
||||||
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
|
||||||
- `storage_classes` (Attributes List) The storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
|
||||||
|
|
||||||
<a id="nestedatt--storage_classes"></a>
|
|
||||||
### Nested Schema for `storage_classes`
|
|
||||||
|
|
||||||
Read-Only:
|
|
||||||
|
|
||||||
- `class` (String)
|
|
||||||
- `max_io_per_sec` (Number)
|
|
||||||
- `max_through_in_mb` (Number)
|
|
||||||
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
@ -25,6 +25,16 @@ import {
|
||||||
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project_id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance_id"
|
||||||
|
database_id = "database_id"
|
||||||
|
}
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@ description: |-
|
||||||
## Example Usage
|
## Example Usage
|
||||||
|
|
||||||
```terraform
|
```terraform
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
name = "example-instance"
|
name = "example-instance"
|
||||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
|
@ -59,7 +59,7 @@ import {
|
||||||
|
|
||||||
### Required
|
### Required
|
||||||
|
|
||||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
- `backup_schedule` (String) The schedule for when the database backup will be created. Currently, ONLY daily schedules are supported (every 24 hours). The schedule is written as a cron schedule.
|
||||||
- `flavor_id` (String) The id of the instance flavor.
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
- `name` (String) The name of the instance.
|
- `name` (String) The name of the instance.
|
||||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
|
@ -80,7 +80,7 @@ import {
|
||||||
### Read-Only
|
### Read-Only
|
||||||
|
|
||||||
- `acl` (List of String) List of IPV4 cidr.
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
|
- `connection_info` (Attributes) The connection information of the instance (see [below for nested schema](#nestedatt--connection_info))
|
||||||
- `id` (String) The ID of the instance.
|
- `id` (String) The ID of the instance.
|
||||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
- `status` (String) The current status of the instance.
|
- `status` (String) The current status of the instance.
|
||||||
|
|
@ -127,5 +127,12 @@ Required:
|
||||||
|
|
||||||
Read-Only:
|
Read-Only:
|
||||||
|
|
||||||
|
- `write` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info--write))
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info--write"></a>
|
||||||
|
### Nested Schema for `connection_info.write`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
- `host` (String) The host of the instance.
|
- `host` (String) The host of the instance.
|
||||||
- `port` (Number) The port of the instance.
|
- `port` (Number) The port of the instance.
|
||||||
|
|
|
||||||
|
|
@ -25,6 +25,16 @@ import {
|
||||||
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
user_id = "user.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,34 @@ description: |-
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
collation = ""
|
||||||
|
compatibility = "160"
|
||||||
|
name = ""
|
||||||
|
owner = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement if you want to import an existing sqlserverflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
database_id = "database.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
<!-- schema generated by tfplugindocs -->
|
||||||
## Schema
|
## Schema
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,22 @@ description: |-
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
<!-- schema generated by tfplugindocs -->
|
<!-- schema generated by tfplugindocs -->
|
||||||
## Schema
|
## Schema
|
||||||
|
|
|
||||||
53
docs/resources/sqlserverflexbeta_user.md
Normal file
53
docs/resources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
|
|
@ -10,3 +10,13 @@ import {
|
||||||
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project_id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance_id"
|
||||||
|
database_id = "database_id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
resource "stackitprivatepreview_postgresflexalpha_instance" "msh-instance-only" {
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
name = "example-instance"
|
name = "example-instance"
|
||||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
|
|
||||||
|
|
@ -10,3 +10,13 @@ import {
|
||||||
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
user_id = "user.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,24 @@
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
collation = ""
|
||||||
|
compatibility = "160"
|
||||||
|
name = ""
|
||||||
|
owner = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_database_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
database_id = "database.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,12 @@
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,12 @@
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
341
generator/cmd/build/build.go
Normal file
341
generator/cmd/build/build.go
Normal file
|
|
@ -0,0 +1,341 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/generator/cmd/tools"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Builder orchestrates the provider code-generation run. The exported flags
// are set from the CLI before Build is called.
type Builder struct {
	rootDir string // git repository root, resolved by determineRoot
	SkipClone bool // skip cloning the OAS spec repository (reuse an existing checkout)
	SkipCleanup bool // keep temporary and generated directories after the run
	PackagesOnly bool // only generate pkg_gen, skipping the external-command check
	Verbose bool // emit additional progress logging
	Debug bool // NOTE(review): not read anywhere in the visible code paths — confirm
}
|
||||||
|
|
||||||
|
// Build runs the generation pipeline: resolve the git repository root, verify
// that the required external commands are available, and render schema files
// from the OAS service specs. It returns the first error encountered.
func (b *Builder) Build() error {
	slog.Info("Starting Builder")
	if b.PackagesOnly {
		slog.Info(" >>> only generating pkg_gen <<<")
	}

	rootErr := b.determineRoot()
	if rootErr != nil {
		return rootErr
	}

	if !b.PackagesOnly {
		if b.Verbose {
			slog.Info(" ... Checking needed commands available")
		}
		// NOTE(review): the required-command list is empty, so this check is
		// currently a no-op placeholder for future external tooling.
		chkErr := checkCommands([]string{})
		if chkErr != nil {
			return chkErr
		}
	}

	// Disabled steps, kept for reference: cleanup of the old pkg_gen directory.
	// if !b.SkipCleanup {
	// 	slog.Info("Cleaning up old packages directory")
	// 	err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
	// 	if err != nil {
	// 		return err
	// 	}
	// }
	//
	// if !b.SkipCleanup && !b.PackagesOnly {
	// 	slog.Info("Cleaning up old packages directory")
	// 	err := os.RemoveAll(path.Join(b.rootDir, "pkg_gen"))
	// 	if err != nil {
	// 		return err
	// 	}
	// }

	// Disabled step, kept for reference: cloning the generator repository.
	// slog.Info("Creating generator dir", "dir", fmt.Sprintf("%s/%s", *root, GEN_REPO_NAME))
	// genDir := path.Join(*root, GEN_REPO_NAME)
	// if !b.SkipClone {
	// 	err = createGeneratorDir(GEN_REPO, genDir, b.SkipClone)
	// 	if err != nil {
	// 		return err
	// 	}
	// }

	// Render schema files for every service spec under service_specs/.
	oasHandlerErr := b.oasHandler(path.Join(b.rootDir, "service_specs"))
	if oasHandlerErr != nil {
		return oasHandlerErr
	}

	// Disabled steps, kept for reference: service boilerplate generation and
	// the copy of generated service files into stackit/internal/services.
	// if !b.PackagesOnly {
	// 	slog.Info("Generating service boilerplate")
	// 	err = generateServiceFiles(*root, path.Join(*root, GEN_REPO_NAME))
	// 	if err != nil {
	// 		return err
	// 	}
	//
	// 	slog.Info("Copying all service files")
	// 	err = CopyDirectory(
	// 		path.Join(*root, "generated", "internal", "services"),
	// 		path.Join(*root, "stackit", "internal", "services"),
	// 	)
	// 	if err != nil {
	// 		return err
	// 	}
	//
	// 	err = createBoilerplate(*root, path.Join(*root, "stackit", "internal", "services"))
	// 	if err != nil {
	// 		return err
	// 	}
	// }

	// workaround to remove linter complain :D
	// NOTE(review): this branch exists solely to keep createBoilerplate
	// referenced (unused-function lint); it only fires when ALL four flags are
	// set at once, and then scaffolds into the literal "boilerplate" directory.
	if b.PackagesOnly && b.Verbose && b.SkipClone && b.SkipCleanup {
		bpErr := createBoilerplate(b.rootDir, "boilerplate")
		if bpErr != nil {
			return bpErr
		}
	}

	slog.Info("Done")
	return nil
}
|
||||||
|
|
||||||
|
// templateData is the rendering context handed to the scaffold templates
// (data_source_scaffold.gotmpl, resource_scaffold.gotmpl,
// functions_scaffold.gotmpl).
type templateData struct {
	PackageName string // service package name as it appears on disk
	PackageNameCamel string // service package name in camelCase
	PackageNamePascal string // service package name in PascalCase
	NameCamel string // resource name in camelCase
	NamePascal string // resource name in PascalCase
	NameSnake string // resource name in snake_case (the directory name)
	Fields []string // model struct field names extracted via getTokens
}
|
||||||
|
|
||||||
|
func createBoilerplate(rootFolder, folder string) error {
|
||||||
|
services, err := os.ReadDir(folder)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, svc := range services {
|
||||||
|
if !svc.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
resources, err := os.ReadDir(path.Join(folder, svc.Name()))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var handleDS bool
|
||||||
|
var handleRes bool
|
||||||
|
var foundDS bool
|
||||||
|
var foundRes bool
|
||||||
|
|
||||||
|
for _, res := range resources {
|
||||||
|
if !res.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
resourceName := res.Name()
|
||||||
|
|
||||||
|
dsFile := path.Join(
|
||||||
|
folder,
|
||||||
|
svc.Name(),
|
||||||
|
res.Name(),
|
||||||
|
"datasources_gen",
|
||||||
|
fmt.Sprintf("%s_data_source_gen.go", res.Name()),
|
||||||
|
)
|
||||||
|
handleDS = FileExists(dsFile)
|
||||||
|
|
||||||
|
resFile := path.Join(
|
||||||
|
folder,
|
||||||
|
svc.Name(),
|
||||||
|
res.Name(),
|
||||||
|
"resources_gen",
|
||||||
|
fmt.Sprintf("%s_resource_gen.go", res.Name()),
|
||||||
|
)
|
||||||
|
handleRes = FileExists(resFile)
|
||||||
|
|
||||||
|
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
|
||||||
|
foundDS = FileExists(dsGoFile)
|
||||||
|
|
||||||
|
resGoFile := path.Join(folder, svc.Name(), res.Name(), "resource.go")
|
||||||
|
foundRes = FileExists(resGoFile)
|
||||||
|
|
||||||
|
if handleDS && !foundDS {
|
||||||
|
slog.Info(" creating missing datasource.go", "service", svc.Name(), "resource", resourceName)
|
||||||
|
if !ValidateSnakeCase(resourceName) {
|
||||||
|
return errors.New("resource name is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
fields, tokenErr := getTokens(dsFile)
|
||||||
|
if tokenErr != nil {
|
||||||
|
return fmt.Errorf("error reading tokens: %w", tokenErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
tplName := "data_source_scaffold.gotmpl"
|
||||||
|
err = writeTemplateToFile(
|
||||||
|
tplName,
|
||||||
|
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
|
||||||
|
dsGoFile,
|
||||||
|
&templateData{
|
||||||
|
PackageName: svc.Name(),
|
||||||
|
PackageNameCamel: ToCamelCase(svc.Name()),
|
||||||
|
PackageNamePascal: ToPascalCase(svc.Name()),
|
||||||
|
NameCamel: ToCamelCase(resourceName),
|
||||||
|
NamePascal: ToPascalCase(resourceName),
|
||||||
|
NameSnake: resourceName,
|
||||||
|
Fields: fields,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if handleRes && !foundRes {
|
||||||
|
slog.Info(" creating missing resource.go", "service", svc.Name(), "resource", resourceName)
|
||||||
|
if !ValidateSnakeCase(resourceName) {
|
||||||
|
return errors.New("resource name is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
fields, tokenErr := getTokens(resFile)
|
||||||
|
if tokenErr != nil {
|
||||||
|
return fmt.Errorf("error reading tokens: %w", tokenErr)
|
||||||
|
}
|
||||||
|
|
||||||
|
tplName := "resource_scaffold.gotmpl"
|
||||||
|
err = writeTemplateToFile(
|
||||||
|
tplName,
|
||||||
|
path.Join(rootFolder, "cmd", "cmd", "build", "templates", tplName),
|
||||||
|
resGoFile,
|
||||||
|
&templateData{
|
||||||
|
PackageName: svc.Name(),
|
||||||
|
PackageNameCamel: ToCamelCase(svc.Name()),
|
||||||
|
PackageNamePascal: ToPascalCase(svc.Name()),
|
||||||
|
NameCamel: ToCamelCase(resourceName),
|
||||||
|
NamePascal: ToPascalCase(resourceName),
|
||||||
|
NameSnake: resourceName,
|
||||||
|
Fields: fields,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !FileExists(path.Join(folder, svc.Name(), res.Name(), "functions.go")) {
|
||||||
|
slog.Info(" creating missing functions.go", "service", svc.Name(), "resource", resourceName)
|
||||||
|
if !ValidateSnakeCase(resourceName) {
|
||||||
|
return errors.New("resource name is invalid")
|
||||||
|
}
|
||||||
|
fncTplName := "functions_scaffold.gotmpl"
|
||||||
|
err = writeTemplateToFile(
|
||||||
|
fncTplName,
|
||||||
|
path.Join(rootFolder, "cmd", "cmd", "build", "templates", fncTplName),
|
||||||
|
path.Join(folder, svc.Name(), res.Name(), "functions.go"),
|
||||||
|
&templateData{
|
||||||
|
PackageName: svc.Name(),
|
||||||
|
PackageNameCamel: ToCamelCase(svc.Name()),
|
||||||
|
PackageNamePascal: ToPascalCase(svc.Name()),
|
||||||
|
NameCamel: ToCamelCase(resourceName),
|
||||||
|
NamePascal: ToPascalCase(resourceName),
|
||||||
|
NameSnake: resourceName,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// schemaIDRegex matches a generated schema attribute declaration for "id",
// e.g. `	"id": schema.StringAttribute{`.
var schemaIDRegex = regexp.MustCompile(`(\s+")(id)(": schema.[a-zA-Z0-9]+Attribute{)`)

// modelIDRegex matches a generated model struct field tagged `tfsdk:"id"`,
// e.g. `	Id types.String ` + "`tfsdk:\"id\"`".
var modelIDRegex = regexp.MustCompile(`(\s+Id\s+types.[a-zA-Z0-9]+\s+.tfsdk:")(id)(".)`)

// handleLine rewrites a generated "id" schema attribute or model tag to
// "tf_original_api_id" and returns the (possibly rewritten) line. Lines that
// match neither pattern are returned unchanged. The error is always nil and
// kept only for interface stability with other line handlers.
// BUG FIX: the regexes were compiled on every call even though handleLine is
// invoked per source line; they are now compiled once at package init.
func handleLine(line string) (string, error) {
	if m := schemaIDRegex.FindStringSubmatch(line); m != nil {
		return fmt.Sprintf("%stf_original_api_id%s", m[1], m[3]), nil
	}

	if m := modelIDRegex.FindStringSubmatch(line); m != nil {
		return fmt.Sprintf("%stf_original_api_id%s", m[1], m[3]), nil
	}

	return line, nil
}
|
||||||
|
|
||||||
|
func (b *Builder) determineRoot() error {
|
||||||
|
root, err := tools.GetGitRoot()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
b.rootDir = root
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(" ... using root", "dir", b.rootDir)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// func createGeneratorDir(repoUrl, targetDir string, skipClone bool) error {
|
||||||
|
// if !skipClone {
|
||||||
|
// if FileExists(targetDir) {
|
||||||
|
// remErr := os.RemoveAll(targetDir)
|
||||||
|
// if remErr != nil {
|
||||||
|
// return remErr
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// _, cloneErr := git.Clone(
|
||||||
|
// clone.Repository(repoUrl),
|
||||||
|
// clone.Directory(targetDir),
|
||||||
|
// )
|
||||||
|
// if cloneErr != nil {
|
||||||
|
// return cloneErr
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// return nil
|
||||||
|
//}
|
||||||
|
|
||||||
|
func getTokens(fileName string) ([]string, error) {
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
|
||||||
|
var result []string
|
||||||
|
|
||||||
|
node, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
ast.Inspect(
|
||||||
|
node, func(n ast.Node) bool {
|
||||||
|
// Suche nach Typ-Deklarationen (structs)
|
||||||
|
ts, ok := n.(*ast.TypeSpec)
|
||||||
|
if ok {
|
||||||
|
if strings.Contains(ts.Name.Name, "Model") {
|
||||||
|
ast.Inspect(
|
||||||
|
ts, func(sn ast.Node) bool {
|
||||||
|
tts, tok := sn.(*ast.Field)
|
||||||
|
if tok {
|
||||||
|
result = append(result, tts.Names[0].String())
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
@ -3,6 +3,7 @@ package build
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"syscall"
|
"syscall"
|
||||||
|
|
@ -74,14 +75,24 @@ func Copy(srcFile, dstFile string) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
defer out.Close()
|
defer func(out *os.File) {
|
||||||
|
err := out.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to close file", slog.Any("err", err))
|
||||||
|
}
|
||||||
|
}(out)
|
||||||
|
|
||||||
in, err := os.Open(srcFile)
|
in, err := os.Open(srcFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
defer in.Close()
|
defer func(in *os.File) {
|
||||||
|
err := in.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("error closing destination file", slog.Any("err", err))
|
||||||
|
}
|
||||||
|
}(in)
|
||||||
|
|
||||||
_, err = io.Copy(out, in)
|
_, err = io.Copy(out, in)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
120
generator/cmd/build/functions.go
Normal file
120
generator/cmd/build/functions.go
Normal file
|
|
@ -0,0 +1,120 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
	"fmt"
	"log/slog"
	"os"
	"os/exec"
	"strings"
	"text/template"
	"unicode/utf8"
)
|
||||||
|
|
||||||
|
func FileExists(pathValue string) bool {
|
||||||
|
_, err := os.Stat(pathValue)
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// ucfirst returns s with its first rune upper-cased; the remainder is left
// unchanged. The empty string is returned as-is. It is exposed to the
// scaffold templates as the "ucfirst" helper.
func ucfirst(s string) string {
	if s == "" {
		return ""
	}
	r, size := utf8.DecodeRuneInString(s)
	if r == utf8.RuneError && size <= 1 {
		// Invalid UTF-8 leader byte: keep the original byte-wise behavior.
		return strings.ToUpper(s[:1]) + s[1:]
	}
	// BUG FIX: the original upper-cased only the first byte (s[:1]), which
	// corrupted multi-byte (non-ASCII) leading runes; decode the full rune.
	return strings.ToUpper(string(r)) + s[size:]
}
|
||||||
|
|
||||||
|
func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) error {
|
||||||
|
fn := template.FuncMap{
|
||||||
|
"ucfirst": ucfirst,
|
||||||
|
}
|
||||||
|
|
||||||
|
tmpl, err := template.New(tplName).Funcs(fn).ParseFiles(tplFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var f *os.File
|
||||||
|
f, err = os.Create(outFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = tmpl.Execute(f, *data)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = f.Close()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
/* saved for later
|
||||||
|
func deleteFiles(fNames ...string) error {
|
||||||
|
for _, fName := range fNames {
|
||||||
|
if _, err := os.Stat(fName); !os.IsNotExist(err) {
|
||||||
|
err = os.Remove(fName)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyFile(src, dst string) (int64, error) {
|
||||||
|
sourceFileStat, err := os.Stat(src)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !sourceFileStat.Mode().IsRegular() {
|
||||||
|
return 0, fmt.Errorf("%s is not a regular file", src)
|
||||||
|
}
|
||||||
|
|
||||||
|
source, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
defer func(source *os.File) {
|
||||||
|
err := source.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("copyFile", "err", err)
|
||||||
|
}
|
||||||
|
}(source)
|
||||||
|
|
||||||
|
destination, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
defer func(destination *os.File) {
|
||||||
|
err := destination.Close()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("copyFile", "err", err)
|
||||||
|
}
|
||||||
|
}(destination)
|
||||||
|
nBytes, err := io.Copy(destination, source)
|
||||||
|
return nBytes, err
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
func checkCommands(commands []string) error {
|
||||||
|
for _, commandName := range commands {
|
||||||
|
if !commandExists(commandName) {
|
||||||
|
return fmt.Errorf("missing command %s", commandName)
|
||||||
|
}
|
||||||
|
slog.Info(" found", "command", commandName)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// commandExists reports whether cmd can be resolved to an executable on PATH.
func commandExists(cmd string) bool {
	if _, lookErr := exec.LookPath(cmd); lookErr != nil {
		return false
	}
	return true
}
|
||||||
446
generator/cmd/build/oas-handler.go
Normal file
446
generator/cmd/build/oas-handler.go
Normal file
|
|
@ -0,0 +1,446 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
|
||||||
|
"github.com/ldez/go-git-cmd-wrapper/v2/clone"
|
||||||
|
"github.com/ldez/go-git-cmd-wrapper/v2/git"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// OasRepoName is the directory name used for the spec repository checkout.
	OasRepoName = "stackit-api-specifications"
	// OasRepo is the upstream git URL of the STACKIT API specifications.
	OasRepo = "https://github.com/stackitcloud/stackit-api-specifications.git"

	// Resource-kind discriminators used when generating schema files.
	ResTypeResource = "resources"
	ResTypeDataSource = "datasources"
)
|
||||||
|
|
||||||
|
// Data describes one service and the API versions to generate for it, as read
// from the service's generator_settings.yml.
type Data struct {
	ServiceName string `yaml:",omitempty" json:",omitempty"` // set programmatically from the directory name, not from YAML
	Versions []Version `yaml:"versions" json:"versions"` // API versions listed in the settings file
}

// Version names one API version of a service and the path to its spec file.
type Version struct {
	Name string `yaml:"name" json:"name"`
	Path string `yaml:"path" json:"path"`
}
|
||||||
|
|
||||||
|
// oasTempDir is the temporary checkout location of the OAS spec repository;
// set by createRepoDir and removed again by oasHandler during cleanup.
var oasTempDir string
|
||||||
|
|
||||||
|
func (b *Builder) oasHandler(specDir string) error {
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info("creating schema files", "dir", specDir)
|
||||||
|
}
|
||||||
|
if _, err := os.Stat(specDir); os.IsNotExist(err) {
|
||||||
|
return fmt.Errorf("spec files directory does not exist")
|
||||||
|
}
|
||||||
|
|
||||||
|
err := b.createRepoDir(b.SkipClone)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("%s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
err2 := b.handleServices(specDir)
|
||||||
|
if err2 != nil {
|
||||||
|
return err2
|
||||||
|
}
|
||||||
|
|
||||||
|
if !b.SkipCleanup {
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info("Finally removing temporary files and directories")
|
||||||
|
}
|
||||||
|
err := os.RemoveAll(path.Join(b.rootDir, "generated"))
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("RemoveAll", "dir", path.Join(b.rootDir, "generated"), "err", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = os.RemoveAll(oasTempDir)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("RemoveAll", "dir", oasTempDir, "err", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) handleServices(specDir string) error {
|
||||||
|
services, err := os.ReadDir(specDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, svc := range services {
|
||||||
|
if !svc.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(" ... found", "service", svc.Name())
|
||||||
|
}
|
||||||
|
var svcVersions Data
|
||||||
|
svcVersions.ServiceName = svc.Name()
|
||||||
|
|
||||||
|
versionsErr := b.getServiceVersions(path.Join(specDir, svc.Name(), "generator_settings.yml"), &svcVersions)
|
||||||
|
if versionsErr != nil {
|
||||||
|
return versionsErr
|
||||||
|
}
|
||||||
|
|
||||||
|
oasSpecErr := b.generateServiceFiles(&svcVersions)
|
||||||
|
if oasSpecErr != nil {
|
||||||
|
return oasSpecErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Builder) getServiceVersions(confFile string, data *Data) error {
|
||||||
|
if _, cfgFileErr := os.Stat(confFile); os.IsNotExist(cfgFileErr) {
|
||||||
|
return fmt.Errorf("config file does not exist")
|
||||||
|
}
|
||||||
|
|
||||||
|
fileContent, fileErr := os.ReadFile(confFile)
|
||||||
|
if fileErr != nil {
|
||||||
|
return fileErr
|
||||||
|
}
|
||||||
|
convErr := yaml.Unmarshal(fileContent, &data)
|
||||||
|
if convErr != nil {
|
||||||
|
return convErr
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// createRepoDir creates a fresh temporary directory and, unless skipClone is
// set, clones the upstream OAS spec repository into it. The resulting path is
// stored in the package-level oasTempDir, which oasHandler removes during
// cleanup. With skipClone set, only the (empty) temp dir path is recorded.
func (b *Builder) createRepoDir(skipClone bool) error {
	tmpDirName, err := os.MkdirTemp("", "oasbuild")
	if err != nil {
		return err
	}
	// Side effect: mutates the package-level oasTempDir for later cleanup.
	oasTempDir = path.Join(tmpDirName, OasRepoName)
	slog.Info("Creating oas repo dir", "dir", oasTempDir)
	if !skipClone {
		// NOTE(review): oasTempDir lives inside the MkdirTemp directory created
		// just above, so this existence check can seemingly never trigger —
		// confirm whether it is dead code or guards a re-entrant call.
		if FileExists(oasTempDir) {
			slog.Warn("target dir exists - skipping", "targetDir", oasTempDir)
			return nil
		}
		out, cloneErr := git.Clone(
			clone.Repository(OasRepo),
			clone.Directory(oasTempDir),
		)
		if cloneErr != nil {
			slog.Error("git clone error", "output", out)
			return cloneErr
		}
		if b.Verbose {
			slog.Info("git clone result", "output", out)
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
func (b *Builder) generateServiceFiles(data *Data) error {
|
||||||
|
err := os.MkdirAll(path.Join(b.rootDir, "generated", "specs"), 0o750)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, v := range data.Versions {
|
||||||
|
specFiles, specsErr := os.ReadDir(path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name))
|
||||||
|
if specsErr != nil {
|
||||||
|
return specsErr
|
||||||
|
}
|
||||||
|
for _, specFile := range specFiles {
|
||||||
|
if specFile.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
||||||
|
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
||||||
|
if matches == nil {
|
||||||
|
slog.Warn(" skipping file (no regex match)", "file", specFile.Name())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
srcSpecFile := path.Join(b.rootDir, "service_specs", data.ServiceName, v.Name, specFile.Name())
|
||||||
|
|
||||||
|
if matches[0][0] != specFile.Name() {
|
||||||
|
return fmt.Errorf("matched filename differs from original filename - this should not happen")
|
||||||
|
}
|
||||||
|
resource := matches[0][1]
|
||||||
|
if b.Verbose {
|
||||||
|
slog.Info(
|
||||||
|
" found service spec",
|
||||||
|
"service",
|
||||||
|
data.ServiceName,
|
||||||