Compare commits
1 commit
alpha
...
chore/ci_t
| Author | SHA1 | Date | |
|---|---|---|---|
| 5024305909 |
76 changed files with 2437 additions and 3076 deletions
34
.github/workflows/ci.yaml
vendored
34
.github/workflows/ci.yaml
vendored
|
|
@ -22,39 +22,6 @@ env:
|
||||||
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
|
CODE_COVERAGE_ARTIFACT_NAME: "code-coverage"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
runner_test:
|
|
||||||
name: "Test STACKIT runner"
|
|
||||||
runs-on: stackit-docker
|
|
||||||
steps:
|
|
||||||
- name: Install needed tools
|
|
||||||
run: |
|
|
||||||
apt-get -y -qq update
|
|
||||||
apt-get -y -qq install jq python3 python3-pip python-is-python3 s3cmd git make wget
|
|
||||||
|
|
||||||
- name: Setup Go
|
|
||||||
uses: actions/setup-go@v6
|
|
||||||
with:
|
|
||||||
go-version: ${{ env.GO_VERSION }}
|
|
||||||
|
|
||||||
- name: Install go tools
|
|
||||||
run: |
|
|
||||||
go install golang.org/x/tools/cmd/goimports@latest
|
|
||||||
go install github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework@latest
|
|
||||||
go install github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi@latest
|
|
||||||
|
|
||||||
- name: Setup JAVA
|
|
||||||
uses: actions/setup-java@v5
|
|
||||||
with:
|
|
||||||
distribution: 'temurin' # See 'Supported distributions' for available options
|
|
||||||
java-version: '21'
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v6
|
|
||||||
|
|
||||||
- name: Run build pkg directory
|
|
||||||
run: |
|
|
||||||
go run cmd/main.go build
|
|
||||||
|
|
||||||
publish_test:
|
publish_test:
|
||||||
name: "Test readiness for publishing provider"
|
name: "Test readiness for publishing provider"
|
||||||
needs: config
|
needs: config
|
||||||
|
|
@ -175,6 +142,7 @@ jobs:
|
||||||
- name: Acceptance Testing
|
- name: Acceptance Testing
|
||||||
env:
|
env:
|
||||||
TF_ACC: "1"
|
TF_ACC: "1"
|
||||||
|
TF_ACC_SERVICE_ACCOUNT_FILE: "~/service_account.json"
|
||||||
if: ${{ github.event_name == 'pull_request' }}
|
if: ${{ github.event_name == 'pull_request' }}
|
||||||
run: make test-acceptance-tf
|
run: make test-acceptance-tf
|
||||||
|
|
||||||
|
|
|
||||||
29
.github/workflows/runnerstats.yaml
vendored
29
.github/workflows/runnerstats.yaml
vendored
|
|
@ -1,29 +0,0 @@
|
||||||
name: Runner stats
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
stats-own:
|
|
||||||
name: "Get own runner stats"
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Install needed tools
|
|
||||||
run: |
|
|
||||||
apt-get -y -qq update
|
|
||||||
apt-get -y -qq install inxi
|
|
||||||
|
|
||||||
- name: Show stats
|
|
||||||
run: inxi -c 0
|
|
||||||
|
|
||||||
stats-stackit:
|
|
||||||
name: "Get STACKIT runner stats"
|
|
||||||
runs-on: stackit-docker
|
|
||||||
steps:
|
|
||||||
- name: Install needed tools
|
|
||||||
run: |
|
|
||||||
apt-get -y -qq update
|
|
||||||
apt-get -y -qq install inxi
|
|
||||||
|
|
||||||
- name: Show stats
|
|
||||||
run: inxi -c 0
|
|
||||||
4
Makefile
4
Makefile
|
|
@ -12,8 +12,7 @@ project-tools:
|
||||||
# LINT
|
# LINT
|
||||||
lint-golangci-lint:
|
lint-golangci-lint:
|
||||||
@echo "Linting with golangci-lint"
|
@echo "Linting with golangci-lint"
|
||||||
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config golang-ci.yaml
|
@$(SCRIPTS_BASE)/lint-golangci-lint.sh
|
||||||
|
|
||||||
|
|
||||||
lint-tf:
|
lint-tf:
|
||||||
@echo "Linting terraform files"
|
@echo "Linting terraform files"
|
||||||
|
|
@ -24,7 +23,6 @@ lint: lint-golangci-lint lint-tf
|
||||||
# DOCUMENTATION GENERATION
|
# DOCUMENTATION GENERATION
|
||||||
generate-docs:
|
generate-docs:
|
||||||
@echo "Generating documentation with tfplugindocs"
|
@echo "Generating documentation with tfplugindocs"
|
||||||
|
|
||||||
@$(SCRIPTS_BASE)/tfplugindocs.sh
|
@$(SCRIPTS_BASE)/tfplugindocs.sh
|
||||||
|
|
||||||
build:
|
build:
|
||||||
|
|
|
||||||
|
|
@ -60,7 +60,7 @@ func (b *Builder) Build() error {
|
||||||
|
|
||||||
if !b.PackagesOnly {
|
if !b.PackagesOnly {
|
||||||
slog.Info(" ... Checking needed commands available")
|
slog.Info(" ... Checking needed commands available")
|
||||||
err := checkCommands([]string{})
|
err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -111,7 +111,7 @@ func (b *Builder) Build() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
slog.Info("Creating OAS dir")
|
slog.Info("Creating OAS dir")
|
||||||
err = os.MkdirAll(path.Join(genDir, "oas"), 0o755) //nolint:gosec // this dir is not sensitive, so we can use 0755
|
err = os.MkdirAll(path.Join(genDir, "oas"), 0755)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -158,17 +158,7 @@ func (b *Builder) Build() error {
|
||||||
if err = cmd.Wait(); err != nil {
|
if err = cmd.Wait(); err != nil {
|
||||||
var exitErr *exec.ExitError
|
var exitErr *exec.ExitError
|
||||||
if errors.As(err, &exitErr) {
|
if errors.As(err, &exitErr) {
|
||||||
slog.Error(
|
slog.Error("cmd.Wait", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"cmd.Wait",
|
|
||||||
"code",
|
|
||||||
exitErr.ExitCode(),
|
|
||||||
"error",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -202,11 +192,7 @@ func (b *Builder) Build() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
slog.Info("Rearranging package directories")
|
slog.Info("Rearranging package directories")
|
||||||
//nolint:gosec // this dir is not sensitive, so we can use 0755
|
err = os.MkdirAll(path.Join(*root, "pkg_gen"), 0755) // noqa:gosec
|
||||||
err = os.MkdirAll(
|
|
||||||
path.Join(*root, "pkg_gen"),
|
|
||||||
0o755,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -216,9 +202,7 @@ func (b *Builder) Build() error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
for _, item := range items {
|
for _, item := range items {
|
||||||
if !item.IsDir() {
|
if item.IsDir() {
|
||||||
continue
|
|
||||||
}
|
|
||||||
slog.Info(" -> package", "name", item.Name())
|
slog.Info(" -> package", "name", item.Name())
|
||||||
tgtDir := path.Join(*root, "pkg_gen", item.Name())
|
tgtDir := path.Join(*root, "pkg_gen", item.Name())
|
||||||
if fileExists(tgtDir) {
|
if fileExists(tgtDir) {
|
||||||
|
|
@ -232,6 +216,7 @@ func (b *Builder) Build() error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if !b.PackagesOnly {
|
if !b.PackagesOnly {
|
||||||
slog.Info("Generating service boilerplate")
|
slog.Info("Generating service boilerplate")
|
||||||
|
|
@ -290,8 +275,8 @@ type templateData struct {
|
||||||
Fields []string
|
Fields []string
|
||||||
}
|
}
|
||||||
|
|
||||||
func fileExists(pathValue string) bool {
|
func fileExists(path string) bool {
|
||||||
_, err := os.Stat(pathValue)
|
_, err := os.Stat(path)
|
||||||
if os.IsNotExist(err) {
|
if os.IsNotExist(err) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
@ -327,22 +312,10 @@ func createBoilerplate(rootFolder, folder string) error {
|
||||||
|
|
||||||
resourceName := res.Name()
|
resourceName := res.Name()
|
||||||
|
|
||||||
dsFile := path.Join(
|
dsFile := path.Join(folder, svc.Name(), res.Name(), "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", res.Name()))
|
||||||
folder,
|
|
||||||
svc.Name(),
|
|
||||||
res.Name(),
|
|
||||||
"datasources_gen",
|
|
||||||
fmt.Sprintf("%s_data_source_gen.go", res.Name()),
|
|
||||||
)
|
|
||||||
handleDS = fileExists(dsFile)
|
handleDS = fileExists(dsFile)
|
||||||
|
|
||||||
resFile := path.Join(
|
resFile := path.Join(folder, svc.Name(), res.Name(), "resources_gen", fmt.Sprintf("%s_resource_gen.go", res.Name()))
|
||||||
folder,
|
|
||||||
svc.Name(),
|
|
||||||
res.Name(),
|
|
||||||
"resources_gen",
|
|
||||||
fmt.Sprintf("%s_resource_gen.go", res.Name()),
|
|
||||||
)
|
|
||||||
handleRes = fileExists(resFile)
|
handleRes = fileExists(resFile)
|
||||||
|
|
||||||
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
|
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
|
||||||
|
|
@ -434,6 +407,7 @@ func createBoilerplate(rootFolder, folder string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -442,7 +416,7 @@ func createBoilerplate(rootFolder, folder string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
func ucfirst(s string) string {
|
func ucfirst(s string) string {
|
||||||
if s == "" {
|
if len(s) == 0 {
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
return strings.ToUpper(s[:1]) + s[1:]
|
return strings.ToUpper(s[:1]) + s[1:]
|
||||||
|
|
@ -477,8 +451,8 @@ func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) e
|
||||||
}
|
}
|
||||||
|
|
||||||
func generateServiceFiles(rootDir, generatorDir string) error {
|
func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
//nolint:gosec // this file is not sensitive, so we can use 0755
|
// slog.Info("Generating specs folder")
|
||||||
err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0o755)
|
err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -516,6 +490,7 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// slog.Info("Checking spec", "name", spec.Name())
|
||||||
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
||||||
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
||||||
if matches != nil {
|
if matches != nil {
|
||||||
|
|
@ -531,44 +506,27 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
resource,
|
resource,
|
||||||
)
|
)
|
||||||
|
|
||||||
oasFile := path.Join(
|
oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()))
|
||||||
generatorDir,
|
|
||||||
"oas",
|
|
||||||
fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()),
|
|
||||||
)
|
|
||||||
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
|
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
|
||||||
slog.Warn(
|
slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
|
||||||
" could not find matching oas",
|
|
||||||
"svc",
|
|
||||||
service.Name(),
|
|
||||||
"version",
|
|
||||||
svcVersion.Name(),
|
|
||||||
)
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name())
|
scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name())
|
||||||
scName = strings.ReplaceAll(scName, "-", "")
|
scName = strings.ReplaceAll(scName, "-", "")
|
||||||
//nolint:gosec // this file is not sensitive, so we can use 0755
|
err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755)
|
||||||
err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0o755)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
specJsonFile := path.Join(
|
// slog.Info("Generating openapi spec json")
|
||||||
rootDir,
|
specJsonFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource))
|
||||||
"generated",
|
|
||||||
"specs",
|
|
||||||
fmt.Sprintf("%s_%s_spec.json", scName, resource),
|
|
||||||
)
|
|
||||||
|
|
||||||
var stdOut, stdErr bytes.Buffer
|
var stdOut, stdErr bytes.Buffer
|
||||||
|
|
||||||
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
// noqa:gosec
|
||||||
cmd := exec.Command(
|
cmd := exec.Command(
|
||||||
"go",
|
"tfplugingen-openapi",
|
||||||
"run",
|
|
||||||
"github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi",
|
|
||||||
"generate",
|
"generate",
|
||||||
"--config",
|
"--config",
|
||||||
path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
|
path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
|
||||||
|
|
@ -595,29 +553,11 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
if err = cmd.Wait(); err != nil {
|
if err = cmd.Wait(); err != nil {
|
||||||
var exitErr *exec.ExitError
|
var exitErr *exec.ExitError
|
||||||
if errors.As(err, &exitErr) {
|
if errors.As(err, &exitErr) {
|
||||||
slog.Error(
|
slog.Error("tfplugingen-openapi generate", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"tfplugingen-openapi generate",
|
|
||||||
"code",
|
|
||||||
exitErr.ExitCode(),
|
|
||||||
"error",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
slog.Error(
|
slog.Error("tfplugingen-openapi generate", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"tfplugingen-openapi generate",
|
|
||||||
"err",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -625,26 +565,18 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
|
slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
tgtFolder := path.Join(
|
// slog.Info("Creating terraform svc resource files folder")
|
||||||
rootDir,
|
tgtFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen")
|
||||||
"generated",
|
err = os.MkdirAll(tgtFolder, 0755)
|
||||||
"internal",
|
|
||||||
"services",
|
|
||||||
scName,
|
|
||||||
resource,
|
|
||||||
"resources_gen",
|
|
||||||
)
|
|
||||||
//nolint:gosec // this file is not sensitive, so we can use 0755
|
|
||||||
err = os.MkdirAll(tgtFolder, 0o755)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
// slog.Info("Generating terraform svc resource files")
|
||||||
|
|
||||||
|
// noqa:gosec
|
||||||
cmd2 := exec.Command(
|
cmd2 := exec.Command(
|
||||||
"go",
|
"tfplugingen-framework",
|
||||||
"run",
|
|
||||||
"github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework",
|
|
||||||
"generate",
|
"generate",
|
||||||
"resources",
|
"resources",
|
||||||
"--input",
|
"--input",
|
||||||
|
|
@ -665,53 +597,27 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
if err = cmd2.Wait(); err != nil {
|
if err = cmd2.Wait(); err != nil {
|
||||||
var exitErr *exec.ExitError
|
var exitErr *exec.ExitError
|
||||||
if errors.As(err, &exitErr) {
|
if errors.As(err, &exitErr) {
|
||||||
slog.Error(
|
slog.Error("tfplugingen-framework generate resources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"tfplugingen-framework generate resources",
|
|
||||||
"code",
|
|
||||||
exitErr.ExitCode(),
|
|
||||||
"error",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
slog.Error(
|
slog.Error("tfplugingen-framework generate resources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"tfplugingen-framework generate resources",
|
|
||||||
"err",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tgtFolder = path.Join(
|
// slog.Info("Creating terraform svc datasource files folder")
|
||||||
rootDir,
|
tgtFolder = path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen")
|
||||||
"generated",
|
err = os.MkdirAll(tgtFolder, 0755)
|
||||||
"internal",
|
|
||||||
"services",
|
|
||||||
scName,
|
|
||||||
resource,
|
|
||||||
"datasources_gen",
|
|
||||||
)
|
|
||||||
//nolint:gosec // this directory is not sensitive, so we can use 0755
|
|
||||||
err = os.MkdirAll(tgtFolder, 0o755)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
// slog.Info("Generating terraform svc resource files")
|
||||||
|
|
||||||
|
// noqa:gosec
|
||||||
cmd3 := exec.Command(
|
cmd3 := exec.Command(
|
||||||
"go",
|
"tfplugingen-framework",
|
||||||
"run",
|
|
||||||
"github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework",
|
|
||||||
"generate",
|
"generate",
|
||||||
"data-sources",
|
"data-sources",
|
||||||
"--input",
|
"--input",
|
||||||
|
|
@ -733,29 +639,11 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
||||||
if err = cmd3.Wait(); err != nil {
|
if err = cmd3.Wait(); err != nil {
|
||||||
var exitErr *exec.ExitError
|
var exitErr *exec.ExitError
|
||||||
if errors.As(err, &exitErr) {
|
if errors.As(err, &exitErr) {
|
||||||
slog.Error(
|
slog.Error("tfplugingen-framework generate data-sources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"tfplugingen-framework generate data-sources",
|
|
||||||
"code",
|
|
||||||
exitErr.ExitCode(),
|
|
||||||
"error",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return fmt.Errorf("%s", stdErr.String())
|
return fmt.Errorf("%s", stdErr.String())
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
slog.Error(
|
slog.Error("tfplugingen-framework generate data-sources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||||
"tfplugingen-framework generate data-sources",
|
|
||||||
"err",
|
|
||||||
err,
|
|
||||||
"stdout",
|
|
||||||
stdOut.String(),
|
|
||||||
"stderr",
|
|
||||||
stdErr.String(),
|
|
||||||
)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -786,10 +674,10 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
root, err := getRoot()
|
root, err := getRoot()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
//nolint:gocritic // in this case, we want to log the error and exit, as we cannot proceed without the root directory
|
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -797,6 +685,7 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
defer tmp.Close()
|
||||||
|
|
||||||
sc := bufio.NewScanner(f)
|
sc := bufio.NewScanner(f)
|
||||||
for sc.Scan() {
|
for sc.Scan() {
|
||||||
|
|
@ -820,7 +709,6 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:gosec // path traversal is not a concern here
|
|
||||||
if err := os.Rename(tmp.Name(), filePath); err != nil {
|
if err := os.Rename(tmp.Name(), filePath); err != nil {
|
||||||
log.Fatal(err)
|
log.Fatal(err)
|
||||||
}
|
}
|
||||||
|
|
@ -885,23 +773,13 @@ func copyFile(src, dst string) (int64, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
defer func(source *os.File) {
|
defer source.Close()
|
||||||
err := source.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("copyFile", "err", err)
|
|
||||||
}
|
|
||||||
}(source)
|
|
||||||
|
|
||||||
destination, err := os.Create(dst)
|
destination, err := os.Create(dst)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
defer func(destination *os.File) {
|
defer destination.Close()
|
||||||
err := destination.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("copyFile", "err", err)
|
|
||||||
}
|
|
||||||
}(destination)
|
|
||||||
nBytes, err := io.Copy(destination, source)
|
nBytes, err := io.Copy(destination, source)
|
||||||
return nBytes, err
|
return nBytes, err
|
||||||
}
|
}
|
||||||
|
|
@ -912,10 +790,12 @@ func getOnlyLatest(m map[string]version) (map[string]version, error) {
|
||||||
item, ok := tmpMap[k]
|
item, ok := tmpMap[k]
|
||||||
if !ok {
|
if !ok {
|
||||||
tmpMap[k] = v
|
tmpMap[k] = v
|
||||||
} else if item.major == v.major && item.minor < v.minor {
|
} else {
|
||||||
|
if item.major == v.major && item.minor < v.minor {
|
||||||
tmpMap[k] = v
|
tmpMap[k] = v
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return tmpMap, nil
|
return tmpMap, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -927,9 +807,7 @@ func getVersions(dir string) (map[string]version, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, entry := range children {
|
for _, entry := range children {
|
||||||
if !entry.IsDir() {
|
if entry.IsDir() {
|
||||||
continue
|
|
||||||
}
|
|
||||||
versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
|
versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|
@ -942,16 +820,15 @@ func getVersions(dir string) (map[string]version, error) {
|
||||||
res[k] = v
|
res[k] = v
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return res, nil
|
return res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
|
func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
|
||||||
res := make(map[string]version)
|
res := make(map[string]version)
|
||||||
for _, vDir := range versionDirs {
|
for _, vDir := range versionDirs {
|
||||||
if !vDir.IsDir() {
|
if vDir.IsDir() {
|
||||||
continue
|
r := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`)
|
||||||
}
|
|
||||||
r := regexp.MustCompile(`v(\d+)([a-z]+)(\d*)`)
|
|
||||||
matches := r.FindAllStringSubmatch(vDir.Name(), -1)
|
matches := r.FindAllStringSubmatch(vDir.Name(), -1)
|
||||||
if matches == nil {
|
if matches == nil {
|
||||||
continue
|
continue
|
||||||
|
|
@ -965,6 +842,7 @@ func extractVersions(service string, versionDirs []os.DirEntry) (map[string]vers
|
||||||
res[*svc] = *ver
|
res[*svc] = *ver
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return res, nil
|
return res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1049,25 +927,30 @@ func getTokens(fileName string) ([]string, error) {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
ast.Inspect(
|
ast.Inspect(node, func(n ast.Node) bool {
|
||||||
node, func(n ast.Node) bool {
|
|
||||||
// Suche nach Typ-Deklarationen (structs)
|
// Suche nach Typ-Deklarationen (structs)
|
||||||
ts, ok := n.(*ast.TypeSpec)
|
ts, ok := n.(*ast.TypeSpec)
|
||||||
if ok {
|
if ok {
|
||||||
if strings.Contains(ts.Name.Name, "Model") {
|
if strings.Contains(ts.Name.Name, "Model") {
|
||||||
ast.Inspect(
|
// fmt.Printf("found model: %s\n", ts.Name.Name)
|
||||||
ts, func(sn ast.Node) bool {
|
ast.Inspect(ts, func(sn ast.Node) bool {
|
||||||
tts, tok := sn.(*ast.Field)
|
tts, tok := sn.(*ast.Field)
|
||||||
if tok {
|
if tok {
|
||||||
|
// fmt.Printf(" found: %+v\n", tts.Names[0])
|
||||||
|
// spew.Dump(tts.Type)
|
||||||
|
|
||||||
result = append(result, tts.Names[0].String())
|
result = append(result, tts.Names[0].String())
|
||||||
|
|
||||||
|
// fld, fldOk := tts.Type.(*ast.Ident)
|
||||||
|
//if fldOk {
|
||||||
|
// fmt.Printf("type: %+v\n", fld)
|
||||||
|
//}
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
},
|
})
|
||||||
)
|
|
||||||
return result, nil
|
return result, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,6 @@ package build
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"log/slog"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"syscall"
|
"syscall"
|
||||||
|
|
@ -75,24 +74,14 @@ func Copy(srcFile, dstFile string) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
defer func(out *os.File) {
|
defer out.Close()
|
||||||
err := out.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("failed to close file", slog.Any("err", err))
|
|
||||||
}
|
|
||||||
}(out)
|
|
||||||
|
|
||||||
in, err := os.Open(srcFile)
|
in, err := os.Open(srcFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
defer func(in *os.File) {
|
defer in.Close()
|
||||||
err := in.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("error closing destination file", slog.Any("err", err))
|
|
||||||
}
|
|
||||||
}(in)
|
|
||||||
|
|
||||||
_, err = io.Copy(out, in)
|
_, err = io.Copy(out, in)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
|
||||||
|
|
@ -16,7 +16,7 @@ var buildCmd = &cobra.Command{
|
||||||
Use: "build",
|
Use: "build",
|
||||||
Short: "Build the necessary boilerplate",
|
Short: "Build the necessary boilerplate",
|
||||||
Long: `...`,
|
Long: `...`,
|
||||||
RunE: func(_ *cobra.Command, _ []string) error {
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
b := build.Builder{
|
b := build.Builder{
|
||||||
SkipClone: skipClone,
|
SkipClone: skipClone,
|
||||||
SkipCleanup: skipCleanup,
|
SkipCleanup: skipCleanup,
|
||||||
|
|
@ -30,7 +30,7 @@ func NewBuildCmd() *cobra.Command {
|
||||||
return buildCmd
|
return buildCmd
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands
|
func init() { // nolint: gochecknoinits
|
||||||
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
|
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
|
||||||
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
|
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
|
||||||
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
|
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
|
||||||
|
|
|
||||||
|
|
@ -12,15 +12,16 @@ var examplesCmd = &cobra.Command{
|
||||||
Use: "examples",
|
Use: "examples",
|
||||||
Short: "create examples",
|
Short: "create examples",
|
||||||
Long: `...`,
|
Long: `...`,
|
||||||
RunE: func(_ *cobra.Command, _ []string) error {
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
// filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
|
|
||||||
|
//filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
|
||||||
//
|
//
|
||||||
// src, err := os.ReadFile(filePathStr)
|
//src, err := os.ReadFile(filePathStr)
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// i := interp.New(
|
//i := interp.New(
|
||||||
// interp.Options{
|
// interp.Options{
|
||||||
// GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages",
|
// GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages",
|
||||||
// BuildTags: nil,
|
// BuildTags: nil,
|
||||||
|
|
@ -33,46 +34,46 @@ var examplesCmd = &cobra.Command{
|
||||||
// Unrestricted: false,
|
// Unrestricted: false,
|
||||||
// },
|
// },
|
||||||
//)
|
//)
|
||||||
// err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators"))
|
//err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators"))
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
// err = i.Use(stdlib.Symbols)
|
//err = i.Use(stdlib.Symbols)
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
// _, err = i.Eval(string(src))
|
//_, err = i.Eval(string(src))
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// v, err := i.Eval("DatabaseDataSourceSchema")
|
//v, err := i.Eval("DatabaseDataSourceSchema")
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// bar := v.Interface().(func(string) string)
|
//bar := v.Interface().(func(string) string)
|
||||||
//
|
//
|
||||||
// r := bar("Kung")
|
//r := bar("Kung")
|
||||||
// println(r)
|
//println(r)
|
||||||
//
|
//
|
||||||
// evalPath, err := i.EvalPath(filePathStr)
|
//evalPath, err := i.EvalPath(filePathStr)
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// fmt.Printf("%+v\n", evalPath)
|
//fmt.Printf("%+v\n", evalPath)
|
||||||
|
|
||||||
// _, err = i.Eval(`import "fmt"`)
|
//_, err = i.Eval(`import "fmt"`)
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
// _, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`)
|
//_, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`)
|
||||||
// if err != nil {
|
//if err != nil {
|
||||||
// return err
|
// return err
|
||||||
//}
|
//}
|
||||||
|
|
||||||
// v = i.Symbols("Hallo")
|
//v = i.Symbols("Hallo")
|
||||||
|
|
||||||
// fmt.Println(v)
|
// fmt.Println(v)
|
||||||
return workServices()
|
return workServices()
|
||||||
|
|
@ -109,6 +110,6 @@ func NewExamplesCmd() *cobra.Command {
|
||||||
return examplesCmd
|
return examplesCmd
|
||||||
}
|
}
|
||||||
|
|
||||||
// func init() { // nolint: gochecknoinits
|
//func init() { // nolint: gochecknoinits
|
||||||
// examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example")
|
// examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example")
|
||||||
//}
|
//}
|
||||||
|
|
|
||||||
|
|
@ -24,7 +24,7 @@ var getFieldsCmd = &cobra.Command{
|
||||||
Use: "get-fields",
|
Use: "get-fields",
|
||||||
Short: "get fields from file",
|
Short: "get fields from file",
|
||||||
Long: `...`,
|
Long: `...`,
|
||||||
PreRunE: func(_ *cobra.Command, _ []string) error {
|
PreRunE: func(cmd *cobra.Command, args []string) error {
|
||||||
typeStr := "data_source"
|
typeStr := "data_source"
|
||||||
if resType != "resource" && resType != "datasource" {
|
if resType != "resource" && resType != "datasource" {
|
||||||
return fmt.Errorf("--type can only be resource or datasource")
|
return fmt.Errorf("--type can only be resource or datasource")
|
||||||
|
|
@ -76,13 +76,13 @@ var getFieldsCmd = &cobra.Command{
|
||||||
|
|
||||||
//// Enum check
|
//// Enum check
|
||||||
// switch format {
|
// switch format {
|
||||||
// case "json", "yaml":
|
//case "json", "yaml":
|
||||||
//default:
|
//default:
|
||||||
// return fmt.Errorf("invalid --format: %s (want json|yaml)", format)
|
// return fmt.Errorf("invalid --format: %s (want json|yaml)", format)
|
||||||
//}
|
//}
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
RunE: func(_ *cobra.Command, _ []string) error {
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
return getFields(filePath)
|
return getFields(filePath)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -107,26 +107,31 @@ func getTokens(fileName string) ([]string, error) {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
ast.Inspect(
|
ast.Inspect(node, func(n ast.Node) bool {
|
||||||
node, func(n ast.Node) bool {
|
|
||||||
// Suche nach Typ-Deklarationen (structs)
|
// Suche nach Typ-Deklarationen (structs)
|
||||||
ts, ok := n.(*ast.TypeSpec)
|
ts, ok := n.(*ast.TypeSpec)
|
||||||
if ok {
|
if ok {
|
||||||
if strings.Contains(ts.Name.Name, "Model") {
|
if strings.Contains(ts.Name.Name, "Model") {
|
||||||
ast.Inspect(
|
// fmt.Printf("found model: %s\n", ts.Name.Name)
|
||||||
ts, func(sn ast.Node) bool {
|
ast.Inspect(ts, func(sn ast.Node) bool {
|
||||||
tts, tok := sn.(*ast.Field)
|
tts, tok := sn.(*ast.Field)
|
||||||
if tok {
|
if tok {
|
||||||
|
// fmt.Printf(" found: %+v\n", tts.Names[0])
|
||||||
|
// spew.Dump(tts.Type)
|
||||||
|
|
||||||
result = append(result, tts.Names[0].String())
|
result = append(result, tts.Names[0].String())
|
||||||
|
|
||||||
|
// fld, fldOk := tts.Type.(*ast.Ident)
|
||||||
|
//if fldOk {
|
||||||
|
// fmt.Printf("type: %+v\n", fld)
|
||||||
|
//}
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true
|
return true
|
||||||
},
|
})
|
||||||
)
|
|
||||||
return result, nil
|
return result, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -134,15 +139,9 @@ func NewGetFieldsCmd() *cobra.Command {
|
||||||
return getFieldsCmd
|
return getFieldsCmd
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() { //nolint:gochecknoinits //this is the only way to add the command to the rootCmd
|
func init() { // nolint: gochecknoinits
|
||||||
getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path")
|
getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path")
|
||||||
getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name")
|
getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name")
|
||||||
getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name")
|
getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name")
|
||||||
getFieldsCmd.Flags().StringVarP(
|
getFieldsCmd.Flags().StringVarP(&resType, "type", "t", "resource", "resource type (data-source or resource [default])")
|
||||||
&resType,
|
|
||||||
"type",
|
|
||||||
"t",
|
|
||||||
"resource",
|
|
||||||
"resource type (data-source or resource [default])",
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -35,27 +35,36 @@ type GpgPublicKey struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Provider) CreateArchitectureFiles() error {
|
func (p *Provider) CreateArchitectureFiles() error {
|
||||||
|
// var namespace, provider, distPath, repoName, version, gpgFingerprint, gpgPubKeyFile, domain string
|
||||||
|
|
||||||
log.Println("* Creating architecture files in target directories")
|
log.Println("* Creating architecture files in target directories")
|
||||||
|
|
||||||
|
// filename = terraform-provider-[provider]_0.0.1_darwin_amd64.zip - provider_name + version + target + architecture + .zip
|
||||||
|
// prefix := fmt.Sprintf("v1/providers/%s/%s/%s/", namespace, provider, version)
|
||||||
prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version)
|
prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version)
|
||||||
|
|
||||||
|
// pathPrefix := fmt.Sprintf("release/%s", prefix)
|
||||||
pathPrefix := path.Join("release", prefix)
|
pathPrefix := path.Join("release", prefix)
|
||||||
|
|
||||||
|
// urlPrefix := fmt.Sprintf("https://%s/%s", domain, prefix)
|
||||||
urlPrefix, err := url.JoinPath("https://", p.Domain, prefix)
|
urlPrefix, err := url.JoinPath("https://", p.Domain, prefix)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error creating base url: %w", err)
|
return fmt.Errorf("error creating base url: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// download url = https://example.com/v1/providers/namespace/provider/0.0.1/download/terraform-provider_0.0.1_darwin_amd64.zip
|
||||||
downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download")
|
downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error crearting download url: %w", err)
|
return fmt.Errorf("error crearting download url: %w", err)
|
||||||
}
|
}
|
||||||
downloadPathPrefix := path.Join(pathPrefix, "download")
|
downloadPathPrefix := path.Join(pathPrefix, "download")
|
||||||
|
|
||||||
|
// shasums url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS
|
||||||
shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version))
|
shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error creating shasums url: %w", err)
|
return fmt.Errorf("error creating shasums url: %w", err)
|
||||||
}
|
}
|
||||||
|
// shasums_signature_url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS.sig
|
||||||
shasumsSigUrl := shasumsUrl + ".sig"
|
shasumsSigUrl := shasumsUrl + ".sig"
|
||||||
|
|
||||||
gpgAsciiPub, err := p.ReadGpgFile()
|
gpgAsciiPub, err := p.ReadGpgFile()
|
||||||
|
|
@ -107,6 +116,33 @@ func (p *Provider) CreateArchitectureFiles() error {
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
// var architectureTemplate = []byte(fmt.Sprintf(`
|
||||||
|
//{
|
||||||
|
// "protocols": [
|
||||||
|
// "4.0",
|
||||||
|
// "5.1",
|
||||||
|
// "6.0"
|
||||||
|
// ],
|
||||||
|
// "os": "%s",
|
||||||
|
// "arch": "%s",
|
||||||
|
// "filename": "%s",
|
||||||
|
// "download_url": "%s",
|
||||||
|
// "shasums_url": "%s",
|
||||||
|
// "shasums_signature_url": "%s",
|
||||||
|
// "shasum": "%s",
|
||||||
|
// "signing_keys": {
|
||||||
|
// "gpg_public_keys": [
|
||||||
|
// {
|
||||||
|
// "key_id": "%s",
|
||||||
|
// "ascii_armor": "%s",
|
||||||
|
// "trust_signature": "",
|
||||||
|
// "source": "",
|
||||||
|
// "source_url": ""
|
||||||
|
// }
|
||||||
|
// ]
|
||||||
|
// }
|
||||||
|
//}
|
||||||
|
// `, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub))
|
||||||
|
|
||||||
log.Printf(" - Arch file: %s", archFileName)
|
log.Printf(" - Arch file: %s", archFileName)
|
||||||
|
|
||||||
|
|
@ -124,12 +160,8 @@ func WriteArchitectureFile(filePath string, arch Architecture) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error encoding data: %w", err)
|
return fmt.Errorf("error encoding data: %w", err)
|
||||||
}
|
}
|
||||||
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm
|
|
||||||
err = os.WriteFile(
|
err = os.WriteFile(filePath, jsonString, os.ModePerm)
|
||||||
filePath,
|
|
||||||
jsonString,
|
|
||||||
os.ModePerm,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error writing data: %w", err)
|
return fmt.Errorf("error writing data: %w", err)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -161,12 +161,10 @@ func (p *Provider) createVersionsFile() error {
|
||||||
target := fileNameSplit[2]
|
target := fileNameSplit[2]
|
||||||
arch := fileNameSplit[3]
|
arch := fileNameSplit[3]
|
||||||
|
|
||||||
version.Platforms = append(
|
version.Platforms = append(version.Platforms, Platform{
|
||||||
version.Platforms, Platform{
|
|
||||||
OS: target,
|
OS: target,
|
||||||
Arch: arch,
|
Arch: arch,
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
data := Data{}
|
data := Data{}
|
||||||
|
|
@ -208,19 +206,16 @@ func (p *Provider) CreateWellKnown() error {
|
||||||
log.Println("* Creating .well-known directory")
|
log.Println("* Creating .well-known directory")
|
||||||
pathString := path.Join(p.RootPath, "release", ".well-known")
|
pathString := path.Join(p.RootPath, "release", ".well-known")
|
||||||
|
|
||||||
//nolint:gosec // this file is not sensitive, so we can use ModePerm
|
|
||||||
err := os.MkdirAll(pathString, os.ModePerm)
|
err := os.MkdirAll(pathString, os.ModePerm)
|
||||||
if err != nil && !errors.Is(err, fs.ErrExist) {
|
if err != nil && !errors.Is(err, fs.ErrExist) {
|
||||||
return fmt.Errorf("error creating '%s' dir: %w", pathString, err)
|
return fmt.Errorf("error creating '%s' dir: %w", pathString, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Println(" - Writing to .well-known/terraform.json file")
|
log.Println(" - Writing to .well-known/terraform.json file")
|
||||||
|
|
||||||
//nolint:gosec // this file is not sensitive, so we can use 0644
|
|
||||||
err = os.WriteFile(
|
err = os.WriteFile(
|
||||||
fmt.Sprintf("%s/terraform.json", pathString),
|
fmt.Sprintf("%s/terraform.json", pathString),
|
||||||
[]byte(`{"providers.v1": "/v1/providers/"}`),
|
[]byte(`{"providers.v1": "/v1/providers/"}`),
|
||||||
0o644,
|
0644,
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|
@ -229,10 +224,9 @@ func (p *Provider) CreateWellKnown() error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func CreateDir(pathValue string) error {
|
func CreateDir(path string) error {
|
||||||
log.Printf("* Creating %s directory", pathValue)
|
log.Printf("* Creating %s directory", path)
|
||||||
//nolint:gosec // this file is not sensitive, so we can use ModePerm
|
err := os.MkdirAll(path, os.ModePerm)
|
||||||
err := os.MkdirAll(pathValue, os.ModePerm)
|
|
||||||
if errors.Is(err, fs.ErrExist) {
|
if errors.Is(err, fs.ErrExist) {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -275,23 +269,13 @@ func CopyFile(src, dst string) (int64, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
defer func(source *os.File) {
|
defer source.Close()
|
||||||
err := source.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("error closing source file", slog.Any("err", err))
|
|
||||||
}
|
|
||||||
}(source)
|
|
||||||
|
|
||||||
destination, err := os.Create(dst)
|
destination, err := os.Create(dst)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
defer func(destination *os.File) {
|
defer destination.Close()
|
||||||
err := destination.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("error closing destination file", slog.Any("err", err))
|
|
||||||
}
|
|
||||||
}(destination)
|
|
||||||
nBytes, err := io.Copy(destination, source)
|
nBytes, err := io.Copy(destination, source)
|
||||||
return nBytes, err
|
return nBytes, err
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -35,12 +35,7 @@ func (d *Data) WriteToFile(filePath string) error {
|
||||||
return fmt.Errorf("error encoding data: %w", err)
|
return fmt.Errorf("error encoding data: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm
|
err = os.WriteFile(filePath, jsonString, os.ModePerm)
|
||||||
err = os.WriteFile(
|
|
||||||
filePath,
|
|
||||||
jsonString,
|
|
||||||
os.ModePerm,
|
|
||||||
)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error writing data: %w", err)
|
return fmt.Errorf("error writing data: %w", err)
|
||||||
}
|
}
|
||||||
|
|
@ -91,13 +86,7 @@ func (d *Data) LoadFromUrl(uri string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer func(name string) {
|
defer os.Remove(file.Name()) // Clean up
|
||||||
//nolint:gosec // The file path is generated by os.CreateTemp and is not user-controllable
|
|
||||||
err := os.Remove(name)
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("failed to remove temporary file", slog.Any("err", err))
|
|
||||||
}
|
|
||||||
}(file.Name()) // Clean up
|
|
||||||
|
|
||||||
err = DownloadFile(
|
err = DownloadFile(
|
||||||
u.String(),
|
u.String(),
|
||||||
|
|
@ -134,30 +123,20 @@ func (v *Version) AddProtocol(p string) error {
|
||||||
// DownloadFile will download a url and store it in local filepath.
|
// DownloadFile will download a url and store it in local filepath.
|
||||||
// It writes to the destination file as it downloads it, without
|
// It writes to the destination file as it downloads it, without
|
||||||
// loading the entire file into memory.
|
// loading the entire file into memory.
|
||||||
func DownloadFile(urlValue, filepath string) error {
|
func DownloadFile(url string, filepath string) error {
|
||||||
// Create the file
|
// Create the file
|
||||||
//nolint:gosec // path traversal is not a concern here, as the filepath is generated by us and not user input
|
|
||||||
out, err := os.Create(filepath)
|
out, err := os.Create(filepath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer func(out *os.File) {
|
defer out.Close()
|
||||||
err := out.Close()
|
|
||||||
if err != nil {
|
|
||||||
slog.Error("failed to close file", slog.Any("err", err))
|
|
||||||
}
|
|
||||||
}(out)
|
|
||||||
|
|
||||||
// Get the data
|
// Get the data
|
||||||
|
resp, err := http.Get(url)
|
||||||
//nolint:gosec,bodyclose // this is a controlled URL, not user input
|
|
||||||
resp, err := http.Get(urlValue)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer func(Body io.ReadCloser) {
|
defer resp.Body.Close()
|
||||||
_ = Body.Close()
|
|
||||||
}(resp.Body)
|
|
||||||
|
|
||||||
// Write the body to file
|
// Write the body to file
|
||||||
_, err = io.Copy(out, resp.Body)
|
_, err = io.Copy(out, resp.Body)
|
||||||
|
|
|
||||||
|
|
@ -29,32 +29,20 @@ var publishCmd = &cobra.Command{
|
||||||
Use: "publish",
|
Use: "publish",
|
||||||
Short: "Publish terraform provider",
|
Short: "Publish terraform provider",
|
||||||
Long: `...`,
|
Long: `...`,
|
||||||
RunE: func(_ *cobra.Command, _ []string) error {
|
RunE: func(_ *cobra.Command, args []string) error {
|
||||||
return publish()
|
return publish()
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() { //nolint:gochecknoinits //this is the standard way to set up cobra commands
|
func init() { // nolint: gochecknoinits
|
||||||
publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.")
|
publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.")
|
||||||
publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.")
|
publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.")
|
||||||
publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.")
|
publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.")
|
||||||
publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.")
|
publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.")
|
||||||
publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.")
|
publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.")
|
||||||
publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.")
|
publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.")
|
||||||
publishCmd.Flags().StringVarP(
|
publishCmd.Flags().StringVarP(&gpgFingerprint, "gpgFingerprint", "f", "", "GPG Fingerprint for the Terraform registry.")
|
||||||
&gpgFingerprint,
|
publishCmd.Flags().StringVarP(&gpgPubKeyFile, "gpgPubKeyFile", "k", "", "GPG PubKey file name for the Terraform registry.")
|
||||||
"gpgFingerprint",
|
|
||||||
"f",
|
|
||||||
"",
|
|
||||||
"GPG Fingerprint for the Terraform registry.",
|
|
||||||
)
|
|
||||||
publishCmd.Flags().StringVarP(
|
|
||||||
&gpgPubKeyFile,
|
|
||||||
"gpgPubKeyFile",
|
|
||||||
"k",
|
|
||||||
"",
|
|
||||||
"GPG PubKey file name for the Terraform registry.",
|
|
||||||
)
|
|
||||||
|
|
||||||
err := publishCmd.MarkFlagRequired("namespace")
|
err := publishCmd.MarkFlagRequired("namespace")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -117,7 +105,6 @@ func publish() error {
|
||||||
|
|
||||||
// Create release dir - only the contents of this need to be uploaded to S3
|
// Create release dir - only the contents of this need to be uploaded to S3
|
||||||
log.Printf("* Creating release directory")
|
log.Printf("* Creating release directory")
|
||||||
//nolint:gosec // this directory is not sensitive, so we can use 0750
|
|
||||||
err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm)
|
err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm)
|
||||||
if err != nil && !errors.Is(err, fs.ErrExist) {
|
if err != nil && !errors.Is(err, fs.ErrExist) {
|
||||||
return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err)
|
return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err)
|
||||||
|
|
|
||||||
38
docs/data-sources/postgresflexalpha_database.md
Normal file
38
docs/data-sources/postgresflexalpha_database.md
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
database_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_id` (Number) The ID of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/postgresflexalpha_flavor.md
Normal file
54
docs/data-sources/postgresflexalpha_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||||
|
- `project_id` (String) The cpu count of the instance.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The flavor description.
|
||||||
|
- `storage_class` (String) The memory of the instance in Gibibyte.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The flavor id of the instance flavor.
|
||||||
|
- `id` (String) The terraform id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
68
docs/data-sources/postgresflexalpha_flavors.md
Normal file
68
docs/data-sources/postgresflexalpha_flavors.md
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_flavors Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_flavors (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the flavors to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `flavors` (Attributes List) List of flavors available for the project. (see [below for nested schema](#nestedatt--flavors))
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
|
||||||
|
<a id="nestedatt--flavors"></a>
|
||||||
|
### Nested Schema for `flavors`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `memory` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||||
|
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes))
|
||||||
|
- `tf_original_api_id` (String) The id of the instance flavor.
|
||||||
|
|
||||||
|
<a id="nestedatt--flavors--storage_classes"></a>
|
||||||
|
### Nested Schema for `flavors.storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
87
docs/data-sources/postgresflexalpha_instance.md
Normal file
87
docs/data-sources/postgresflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,87 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
|
||||||
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
|
⚠︝ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||||
|
- `status` (String) The current status of the instance.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info"></a>
|
||||||
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance.
|
||||||
|
- `port` (Number) The port of the instance.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The encryption-key key identifier
|
||||||
|
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||||
|
- `kek_key_version` (String) The encryption-key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `performance_class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
42
docs/data-sources/postgresflexalpha_user.md
Normal file
42
docs/data-sources/postgresflexalpha_user.md
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `name` (String) The name of the user.
|
||||||
|
- `roles` (List of String) A list of user roles.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
32
docs/data-sources/sqlserverflexalpha_database.md
Normal file
32
docs/data-sources/sqlserverflexalpha_database.md
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (String) The terraform internal identifier.
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/sqlserverflexalpha_flavor.md
Normal file
54
docs/data-sources/sqlserverflexalpha_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) Defines the node type. It can be either `Single` or `HA`.
|
||||||
|
- `project_id` (String) The project ID of the flavor.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The region of the flavor.
|
||||||
|
- `storage_class` (String) The storage class of the instance.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `id` (String) The id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) The list of storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
77
docs/data-sources/sqlserverflexalpha_instance.md
Normal file
77
docs/data-sources/sqlserverflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `status` (String)
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
62
docs/data-sources/sqlserverflexalpha_user.md
Normal file
62
docs/data-sources/sqlserverflexalpha_user.md
Normal file
|
|
@ -0,0 +1,62 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
40
docs/data-sources/sqlserverflexbeta_database.md
Normal file
40
docs/data-sources/sqlserverflexbeta_database.md
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
database_name = "dbname"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (String) The terraform internal identifier.
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `tf_original_api_id` (Number) The id of the database.
|
||||||
54
docs/data-sources/sqlserverflexbeta_flavor.md
Normal file
54
docs/data-sources/sqlserverflexbeta_flavor.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
node_type = "Single"
|
||||||
|
storage_class = "premium-perf2-stackit"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `cpu` (Number) The cpu count of the instance.
|
||||||
|
- `node_type` (String) Defines the node type. It can be either `Single` or `HA`.
|
||||||
|
- `project_id` (String) The project ID of the flavor.
|
||||||
|
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||||
|
- `region` (String) The region of the flavor.
|
||||||
|
- `storage_class` (String) The storage class of the instance.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `description` (String) The flavor description.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `id` (String) The id of the instance flavor.
|
||||||
|
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||||
|
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||||
|
- `storage_classes` (Attributes List) The list of storage classes available for the flavor. (see [below for nested schema](#nestedatt--storage_classes))
|
||||||
|
|
||||||
|
<a id="nestedatt--storage_classes"></a>
|
||||||
|
### Nested Schema for `storage_classes`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String)
|
||||||
|
- `max_io_per_sec` (Number)
|
||||||
|
- `max_through_in_mb` (Number)
|
||||||
77
docs/data-sources/sqlserverflexbeta_instance.md
Normal file
77
docs/data-sources/sqlserverflexbeta_instance.md
Normal file
|
|
@ -0,0 +1,77 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `status` (String)
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `tf_original_api_id` (String) The ID of the instance.
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
54
docs/data-sources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `page` (Number) Number of the page of items list to be returned.
|
||||||
|
- `size` (Number) Number of items to be returned on each page.
|
||||||
|
- `sort` (String) Sorting of the users to be returned on each page.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||||
|
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||||
|
|
||||||
|
<a id="nestedatt--pagination"></a>
|
||||||
|
### Nested Schema for `pagination`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `page` (Number)
|
||||||
|
- `size` (Number)
|
||||||
|
- `sort` (String)
|
||||||
|
- `total_pages` (Number)
|
||||||
|
- `total_rows` (Number)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--users"></a>
|
||||||
|
### Nested Schema for `users`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `tf_original_api_id` (Number) The ID of the user.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
83
docs/index.md
Normal file
83
docs/index.md
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview Provider"
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview Provider
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
}
|
||||||
|
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = "service_account.json"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Authentication
|
||||||
|
|
||||||
|
# Key flow
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key = var.service_account_key
|
||||||
|
private_key = var.private_key
|
||||||
|
}
|
||||||
|
|
||||||
|
# Key flow (using path)
|
||||||
|
provider "stackitprivatepreview" {
|
||||||
|
default_region = "eu01"
|
||||||
|
service_account_key_path = var.service_account_key_path
|
||||||
|
private_key_path = var.private_key_path
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `authorization_custom_endpoint` (String) Custom endpoint for the Membership service
|
||||||
|
- `cdn_custom_endpoint` (String) Custom endpoint for the CDN service
|
||||||
|
- `credentials_path` (String) Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.
|
||||||
|
- `default_region` (String) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||||
|
- `dns_custom_endpoint` (String) Custom endpoint for the DNS service
|
||||||
|
- `enable_beta_resources` (Boolean) Enable beta resources. Default is false.
|
||||||
|
- `experiments` (List of String) Enables experiments. These are unstable features without official support. More information can be found in the README. Available Experiments: iam, routing-tables, network
|
||||||
|
- `git_custom_endpoint` (String) Custom endpoint for the Git service
|
||||||
|
- `iaas_custom_endpoint` (String) Custom endpoint for the IaaS service
|
||||||
|
- `kms_custom_endpoint` (String) Custom endpoint for the KMS service
|
||||||
|
- `loadbalancer_custom_endpoint` (String) Custom endpoint for the Load Balancer service
|
||||||
|
- `logme_custom_endpoint` (String) Custom endpoint for the LogMe service
|
||||||
|
- `mariadb_custom_endpoint` (String) Custom endpoint for the MariaDB service
|
||||||
|
- `modelserving_custom_endpoint` (String) Custom endpoint for the AI Model Serving service
|
||||||
|
- `mongodbflex_custom_endpoint` (String) Custom endpoint for the MongoDB Flex service
|
||||||
|
- `objectstorage_custom_endpoint` (String) Custom endpoint for the Object Storage service
|
||||||
|
- `observability_custom_endpoint` (String) Custom endpoint for the Observability service
|
||||||
|
- `opensearch_custom_endpoint` (String) Custom endpoint for the OpenSearch service
|
||||||
|
- `postgresflex_custom_endpoint` (String) Custom endpoint for the PostgresFlex service
|
||||||
|
- `private_key` (String) Private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||||
|
- `private_key_path` (String) Path for the private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||||
|
- `rabbitmq_custom_endpoint` (String) Custom endpoint for the RabbitMQ service
|
||||||
|
- `redis_custom_endpoint` (String) Custom endpoint for the Redis service
|
||||||
|
- `region` (String, Deprecated) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||||
|
- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
|
||||||
|
- `scf_custom_endpoint` (String) Custom endpoint for the Cloud Foundry (SCF) service
|
||||||
|
- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
|
||||||
|
- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
|
||||||
|
- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service
|
||||||
|
- `service_account_custom_endpoint` (String) Custom endpoint for the Service Account service
|
||||||
|
- `service_account_email` (String, Deprecated) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
|
||||||
|
- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||||
|
- `service_account_token` (String, Deprecated) Token used for authentication. If set, the token flow will be used to authenticate all operations.
|
||||||
|
- `service_enablement_custom_endpoint` (String) Custom endpoint for the Service Enablement API
|
||||||
|
- `ske_custom_endpoint` (String) Custom endpoint for the Kubernetes Engine (SKE) service
|
||||||
|
- `sqlserverflex_custom_endpoint` (String) Custom endpoint for the SQL Server Flex service
|
||||||
|
- `token_custom_endpoint` (String) Custom endpoint for the token API, which is used to request access tokens when using the key flow
|
||||||
57
docs/resources/postgresflexalpha_database.md
Normal file
57
docs/resources/postgresflexalpha_database.md
Normal file
|
|
@ -0,0 +1,57 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "mydb"
|
||||||
|
owner = "myusername"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project_id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance_id"
|
||||||
|
database_id = "database_id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `database_id` (Number) The ID of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
131
docs/resources/postgresflexalpha_instance.md
Normal file
131
docs/resources/postgresflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,131 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "0 0 * * *"
|
||||||
|
retention_days = 30
|
||||||
|
flavor_id = "flavor.id"
|
||||||
|
replicas = 1
|
||||||
|
storage = {
|
||||||
|
performance_class = "premium-perf2-stackit"
|
||||||
|
size = 10
|
||||||
|
}
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = "service@account.email"
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
version = 17
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
instance_id = var.postgres_instance_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `status` (String) The current status of the instance.
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `performance_class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The encryption-key key identifier
|
||||||
|
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||||
|
- `kek_key_version` (String) The encryption-key version
|
||||||
|
- `service_account` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--connection_info"></a>
|
||||||
|
### Nested Schema for `connection_info`
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance.
|
||||||
|
- `port` (Number) The port of the instance.
|
||||||
59
docs/resources/postgresflexalpha_user.md
Normal file
59
docs/resources/postgresflexalpha_user.md
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_postgresflexalpha_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing postgresflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
user_id = "user.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance.
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
63
docs/resources/sqlserverflexalpha_database.md
Normal file
63
docs/resources/sqlserverflexalpha_database.md
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
collation = ""
|
||||||
|
compatibility = "160"
|
||||||
|
name = ""
|
||||||
|
owner = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_database_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = "project.id"
|
||||||
|
region = "region"
|
||||||
|
instance_id = "instance.id"
|
||||||
|
database_id = "database.id"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
103
docs/resources/sqlserverflexalpha_instance.md
Normal file
103
docs/resources/sqlserverflexalpha_instance.md
Normal file
|
|
@ -0,0 +1,103 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
backup_schedule = "00 00 * * *"
|
||||||
|
flavor = {
|
||||||
|
cpu = 4
|
||||||
|
ram = 16
|
||||||
|
}
|
||||||
|
storage = {
|
||||||
|
class = "class"
|
||||||
|
size = 5
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `status` (String)
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
53
docs/resources/sqlserverflexalpha_user.md
Normal file
53
docs/resources/sqlserverflexalpha_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexalpha_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
51
docs/resources/sqlserverflexbeta_database.md
Normal file
51
docs/resources/sqlserverflexbeta_database.md
Normal file
|
|
@ -0,0 +1,51 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_database (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_database" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-database"
|
||||||
|
owner = "owner"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex database
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_database.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_database_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `name` (String) The name of the database.
|
||||||
|
- `owner` (String) The owner of the database.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `database_name` (String) The name of the database.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||||
|
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||||
|
- `id` (Number) The id of the database.
|
||||||
158
docs/resources/sqlserverflexbeta_instance.md
Normal file
158
docs/resources/sqlserverflexbeta_instance.md
Normal file
|
|
@ -0,0 +1,158 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
# without encryption and SNA
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# without encryption and PUBLIC
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "PUBLIC"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# with encryption and SNA
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
name = "example-instance"
|
||||||
|
backup_schedule = "0 3 * * *"
|
||||||
|
retention_days = 31
|
||||||
|
flavor_id = "flavor_id"
|
||||||
|
storage = {
|
||||||
|
class = "premium-perf2-stackit"
|
||||||
|
size = 50
|
||||||
|
}
|
||||||
|
version = 2022
|
||||||
|
encryption = {
|
||||||
|
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
kek_key_version = 1
|
||||||
|
service_account = "service_account@email"
|
||||||
|
}
|
||||||
|
network = {
|
||||||
|
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||||
|
access_scope = "SNA"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# import with identity
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_instance.import-example
|
||||||
|
identity = {
|
||||||
|
project_id = var.project_id
|
||||||
|
region = var.region
|
||||||
|
instance_id = var.sql_instance_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||||
|
- `flavor_id` (String) The id of the instance flavor.
|
||||||
|
- `name` (String) The name of the instance.
|
||||||
|
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||||
|
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||||
|
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||||
|
- `version` (String) The sqlserver version used for the instance.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `edition` (String) Edition of the MSSQL server instance
|
||||||
|
- `id` (String) The ID of the instance.
|
||||||
|
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||||
|
- `replicas` (Number) How many replicas the instance should have.
|
||||||
|
- `status` (String)
|
||||||
|
|
||||||
|
<a id="nestedatt--network"></a>
|
||||||
|
### Nested Schema for `network`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `acl` (List of String) List of IPV4 cidr.
|
||||||
|
|
||||||
|
Optional:
|
||||||
|
|
||||||
|
- `access_scope` (String) The network access scope of the instance
|
||||||
|
|
||||||
|
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||||
|
|
||||||
|
Read-Only:
|
||||||
|
|
||||||
|
- `instance_address` (String)
|
||||||
|
- `router_address` (String)
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--storage"></a>
|
||||||
|
### Nested Schema for `storage`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `class` (String) The storage class for the storage.
|
||||||
|
- `size` (Number) The storage size in Gigabytes.
|
||||||
|
|
||||||
|
|
||||||
|
<a id="nestedatt--encryption"></a>
|
||||||
|
### Nested Schema for `encryption`
|
||||||
|
|
||||||
|
Required:
|
||||||
|
|
||||||
|
- `kek_key_id` (String) The key identifier
|
||||||
|
- `kek_key_ring_id` (String) The keyring identifier
|
||||||
|
- `kek_key_version` (String) The key version
|
||||||
|
- `service_account` (String)
|
||||||
53
docs/resources/sqlserverflexbeta_user.md
Normal file
53
docs/resources/sqlserverflexbeta_user.md
Normal file
|
|
@ -0,0 +1,53 @@
|
||||||
|
---
|
||||||
|
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||||
|
page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
|
||||||
|
subcategory: ""
|
||||||
|
description: |-
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# stackitprivatepreview_sqlserverflexbeta_user (Resource)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Example Usage
|
||||||
|
|
||||||
|
```terraform
|
||||||
|
resource "stackitprivatepreview_sqlserverflexbeta_user" "example" {
|
||||||
|
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||||
|
username = "username"
|
||||||
|
roles = ["role"]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||||
|
import {
|
||||||
|
to = stackitprivatepreview_sqlserverflexbeta_user.import-example
|
||||||
|
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- schema generated by tfplugindocs -->
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
### Required
|
||||||
|
|
||||||
|
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||||
|
- `username` (String) The name of the user.
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- `default_database` (String) The default database for a user of the instance.
|
||||||
|
- `instance_id` (String) The ID of the instance.
|
||||||
|
- `project_id` (String) The STACKIT project ID.
|
||||||
|
- `region` (String) The region which should be addressed
|
||||||
|
- `user_id` (Number) The ID of the user.
|
||||||
|
|
||||||
|
### Read-Only
|
||||||
|
|
||||||
|
- `host` (String) The host of the instance in which the user belongs to.
|
||||||
|
- `id` (Number) The ID of the user.
|
||||||
|
- `password` (String) The password for the user.
|
||||||
|
- `port` (Number) The port of the instance in which the user belongs to.
|
||||||
|
- `status` (String) The current status of the user.
|
||||||
|
- `uri` (String) The connection string for the user to the instance.
|
||||||
233
go.mod
233
go.mod
|
|
@ -2,17 +2,10 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
|
||||||
|
|
||||||
go 1.25.6
|
go 1.25.6
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/SladkyCitron/slogcolor v1.8.0
|
github.com/SladkyCitron/slogcolor v1.8.0
|
||||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
|
||||||
github.com/golangci/golangci-lint/v2 v2.10.1
|
|
||||||
github.com/google/go-cmp v0.7.0
|
github.com/google/go-cmp v0.7.0
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1
|
|
||||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0
|
|
||||||
github.com/hashicorp/terraform-plugin-docs v0.24.0
|
|
||||||
github.com/hashicorp/terraform-plugin-framework v1.17.0
|
github.com/hashicorp/terraform-plugin-framework v1.17.0
|
||||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
||||||
github.com/hashicorp/terraform-plugin-go v0.29.0
|
github.com/hashicorp/terraform-plugin-go v0.29.0
|
||||||
|
|
@ -26,277 +19,71 @@ require (
|
||||||
github.com/spf13/cobra v1.10.2
|
github.com/spf13/cobra v1.10.2
|
||||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1
|
github.com/stackitcloud/stackit-sdk-go/core v0.21.1
|
||||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
|
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
|
||||||
|
github.com/stretchr/testify v1.11.1
|
||||||
github.com/teambition/rrule-go v1.8.2
|
github.com/teambition/rrule-go v1.8.2
|
||||||
golang.org/x/tools v0.42.0
|
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
require (
|
||||||
|
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
||||||
|
golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 // indirect
|
||||||
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
|
|
||||||
4d63.com/gochecknoglobals v0.2.2 // indirect
|
|
||||||
codeberg.org/chavacava/garif v0.2.0 // indirect
|
|
||||||
codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
|
|
||||||
dario.cat/mergo v1.0.1 // indirect
|
dario.cat/mergo v1.0.1 // indirect
|
||||||
dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect
|
|
||||||
dev.gaijin.team/go/golib v0.6.0 // indirect
|
|
||||||
github.com/4meepo/tagalign v1.4.3 // indirect
|
|
||||||
github.com/Abirdcfly/dupword v0.1.7 // indirect
|
|
||||||
github.com/AdminBenni/iota-mixing v1.0.0 // indirect
|
|
||||||
github.com/AlwxSin/noinlineerr v1.0.5 // indirect
|
|
||||||
github.com/Antonboom/errname v1.1.1 // indirect
|
|
||||||
github.com/Antonboom/nilnil v1.1.1 // indirect
|
|
||||||
github.com/Antonboom/testifylint v1.6.4 // indirect
|
|
||||||
github.com/BurntSushi/toml v1.6.0 // indirect
|
|
||||||
github.com/Djarvur/go-err113 v0.1.1 // indirect
|
|
||||||
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
|
|
||||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
|
||||||
github.com/Masterminds/semver/v3 v3.4.0 // indirect
|
|
||||||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
|
||||||
github.com/MirrexOne/unqueryvet v1.5.3 // indirect
|
|
||||||
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
|
|
||||||
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
||||||
github.com/agext/levenshtein v1.2.3 // indirect
|
github.com/agext/levenshtein v1.2.3 // indirect
|
||||||
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
|
|
||||||
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
|
|
||||||
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
|
|
||||||
github.com/alexkohler/prealloc v1.0.2 // indirect
|
|
||||||
github.com/alfatraining/structtag v1.0.0 // indirect
|
|
||||||
github.com/alingse/asasalint v0.0.11 // indirect
|
|
||||||
github.com/alingse/nilnesserr v0.2.0 // indirect
|
|
||||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||||
github.com/armon/go-radix v1.0.0 // indirect
|
|
||||||
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
|
|
||||||
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
|
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
|
||||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
|
||||||
github.com/beorn7/perks v1.0.1 // indirect
|
|
||||||
github.com/bgentry/speakeasy v0.1.0 // indirect
|
|
||||||
github.com/bkielbasa/cyclop v1.2.3 // indirect
|
|
||||||
github.com/blizzy78/varnamelen v0.8.0 // indirect
|
|
||||||
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
|
|
||||||
github.com/bombsimon/wsl/v4 v4.7.0 // indirect
|
|
||||||
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
|
|
||||||
github.com/breml/bidichk v0.3.3 // indirect
|
|
||||||
github.com/breml/errchkjson v0.4.1 // indirect
|
|
||||||
github.com/buger/jsonparser v1.1.1 // indirect
|
|
||||||
github.com/butuzov/ireturn v0.4.0 // indirect
|
|
||||||
github.com/butuzov/mirror v1.3.0 // indirect
|
|
||||||
github.com/catenacyber/perfsprint v0.10.1 // indirect
|
|
||||||
github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
|
|
||||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
|
||||||
github.com/charithe/durationcheck v0.0.11 // indirect
|
|
||||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
|
|
||||||
github.com/charmbracelet/lipgloss v1.1.0 // indirect
|
|
||||||
github.com/charmbracelet/x/ansi v0.10.1 // indirect
|
|
||||||
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
|
|
||||||
github.com/charmbracelet/x/term v0.2.1 // indirect
|
|
||||||
github.com/ckaznocha/intrange v0.3.1 // indirect
|
|
||||||
github.com/cloudflare/circl v1.6.3 // indirect
|
github.com/cloudflare/circl v1.6.3 // indirect
|
||||||
github.com/curioswitch/go-reassign v0.3.0 // indirect
|
|
||||||
github.com/daixiang0/gci v0.13.7 // indirect
|
|
||||||
github.com/dave/dst v0.27.3 // indirect
|
|
||||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||||
github.com/denis-tingaikin/go-header v0.5.0 // indirect
|
|
||||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
|
||||||
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect
|
|
||||||
github.com/ettle/strcase v0.2.0 // indirect
|
|
||||||
github.com/fatih/color v1.18.0 // indirect
|
github.com/fatih/color v1.18.0 // indirect
|
||||||
github.com/fatih/structtag v1.2.0 // indirect
|
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||||
github.com/firefart/nonamedreturns v1.0.6 // indirect
|
|
||||||
github.com/fsnotify/fsnotify v1.5.4 // indirect
|
|
||||||
github.com/fzipp/gocyclo v0.6.0 // indirect
|
|
||||||
github.com/ghostiam/protogetter v0.3.20 // indirect
|
|
||||||
github.com/go-critic/go-critic v0.14.3 // indirect
|
|
||||||
github.com/go-toolsmith/astcast v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/astcopy v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/astequal v1.2.0 // indirect
|
|
||||||
github.com/go-toolsmith/astfmt v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/astp v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/strparse v1.1.0 // indirect
|
|
||||||
github.com/go-toolsmith/typep v1.1.0 // indirect
|
|
||||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
|
||||||
github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
|
|
||||||
github.com/gobwas/glob v0.2.3 // indirect
|
|
||||||
github.com/godoc-lint/godoc-lint v0.11.2 // indirect
|
|
||||||
github.com/gofrs/flock v0.13.0 // indirect
|
|
||||||
github.com/golang/protobuf v1.5.4 // indirect
|
github.com/golang/protobuf v1.5.4 // indirect
|
||||||
github.com/golangci/asciicheck v0.5.0 // indirect
|
|
||||||
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
|
|
||||||
github.com/golangci/go-printf-func-name v0.1.1 // indirect
|
|
||||||
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
|
|
||||||
github.com/golangci/golines v0.15.0 // indirect
|
|
||||||
github.com/golangci/misspell v0.8.0 // indirect
|
|
||||||
github.com/golangci/plugin-module-register v0.1.2 // indirect
|
|
||||||
github.com/golangci/revgrep v0.8.0 // indirect
|
|
||||||
github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
|
|
||||||
github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
|
|
||||||
github.com/gordonklaus/ineffassign v0.2.0 // indirect
|
|
||||||
github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
|
|
||||||
github.com/gostaticanalysis/comment v1.5.0 // indirect
|
|
||||||
github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
|
|
||||||
github.com/gostaticanalysis/nilerr v0.1.2 // indirect
|
|
||||||
github.com/hashicorp/cli v1.1.7 // indirect
|
|
||||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||||
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
|
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
|
||||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||||
github.com/hashicorp/go-cty v1.5.0 // indirect
|
github.com/hashicorp/go-cty v1.5.0 // indirect
|
||||||
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
||||||
github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
|
|
||||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||||
github.com/hashicorp/go-plugin v1.7.0 // indirect
|
github.com/hashicorp/go-plugin v1.7.0 // indirect
|
||||||
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
|
||||||
github.com/hashicorp/hc-install v0.9.3 // indirect
|
github.com/hashicorp/hc-install v0.9.3 // indirect
|
||||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
|
||||||
github.com/hashicorp/hcl/v2 v2.24.0 // indirect
|
github.com/hashicorp/hcl/v2 v2.24.0 // indirect
|
||||||
github.com/hashicorp/logutils v1.0.0 // indirect
|
github.com/hashicorp/logutils v1.0.0 // indirect
|
||||||
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
||||||
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
||||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect
|
|
||||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
|
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
|
||||||
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
||||||
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
|
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
|
||||||
github.com/hashicorp/yamux v0.1.2 // indirect
|
github.com/hashicorp/yamux v0.1.2 // indirect
|
||||||
github.com/hexops/gotextdiff v1.0.3 // indirect
|
|
||||||
github.com/huandu/xstrings v1.4.0 // indirect
|
|
||||||
github.com/imdario/mergo v0.3.16 // indirect
|
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
github.com/jgautheron/goconst v1.8.2 // indirect
|
github.com/kr/text v0.2.0 // indirect
|
||||||
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
|
|
||||||
github.com/jjti/go-spancheck v0.6.5 // indirect
|
|
||||||
github.com/julz/importas v0.2.0 // indirect
|
|
||||||
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
|
|
||||||
github.com/kisielk/errcheck v1.9.0 // indirect
|
|
||||||
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
|
|
||||||
github.com/kulti/thelper v0.7.1 // indirect
|
|
||||||
github.com/kunwardeep/paralleltest v1.0.15 // indirect
|
|
||||||
github.com/lasiar/canonicalheader v1.1.2 // indirect
|
|
||||||
github.com/ldez/exptostd v0.4.5 // indirect
|
|
||||||
github.com/ldez/gomoddirectives v0.8.0 // indirect
|
|
||||||
github.com/ldez/grignotin v0.10.1 // indirect
|
|
||||||
github.com/ldez/structtags v0.6.1 // indirect
|
|
||||||
github.com/ldez/tagliatelle v0.7.2 // indirect
|
|
||||||
github.com/ldez/usetesting v0.5.0 // indirect
|
|
||||||
github.com/leonklingele/grouper v1.1.2 // indirect
|
|
||||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
|
||||||
github.com/macabu/inamedparam v0.2.0 // indirect
|
|
||||||
github.com/magiconair/properties v1.8.6 // indirect
|
|
||||||
github.com/mailru/easyjson v0.7.7 // indirect
|
|
||||||
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
|
|
||||||
github.com/manuelarte/funcorder v0.5.0 // indirect
|
|
||||||
github.com/maratori/testableexamples v1.0.1 // indirect
|
|
||||||
github.com/maratori/testpackage v1.1.2 // indirect
|
|
||||||
github.com/matoous/godox v1.1.0 // indirect
|
|
||||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
|
|
||||||
github.com/mgechev/revive v1.14.0 // indirect
|
|
||||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
|
||||||
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
||||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||||
github.com/moricho/tparallel v0.3.2 // indirect
|
|
||||||
github.com/muesli/termenv v0.16.0 // indirect
|
|
||||||
github.com/nakabonne/nestif v0.3.1 // indirect
|
|
||||||
github.com/nishanths/exhaustive v0.12.0 // indirect
|
|
||||||
github.com/nishanths/predeclared v0.2.2 // indirect
|
|
||||||
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
|
|
||||||
github.com/oklog/run v1.2.0 // indirect
|
github.com/oklog/run v1.2.0 // indirect
|
||||||
github.com/pb33f/libopenapi v0.15.0 // indirect
|
|
||||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
|
||||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
|
||||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||||
github.com/posener/complete v1.2.3 // indirect
|
|
||||||
github.com/prometheus/client_golang v1.12.1 // indirect
|
|
||||||
github.com/prometheus/client_model v0.2.0 // indirect
|
|
||||||
github.com/prometheus/common v0.32.1 // indirect
|
|
||||||
github.com/prometheus/procfs v0.7.3 // indirect
|
|
||||||
github.com/quasilyte/go-ruleguard v0.4.5 // indirect
|
|
||||||
github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
|
|
||||||
github.com/quasilyte/gogrep v0.5.0 // indirect
|
|
||||||
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
|
|
||||||
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
|
|
||||||
github.com/raeperd/recvcheck v0.2.0 // indirect
|
|
||||||
github.com/rivo/uniseg v0.4.7 // indirect
|
|
||||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
|
||||||
github.com/ryancurrah/gomodguard v1.4.1 // indirect
|
|
||||||
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
|
|
||||||
github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
|
|
||||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
|
|
||||||
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
|
|
||||||
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
|
|
||||||
github.com/securego/gosec/v2 v2.23.0 // indirect
|
|
||||||
github.com/shopspring/decimal v1.3.1 // indirect
|
|
||||||
github.com/sirupsen/logrus v1.9.4 // indirect
|
|
||||||
github.com/sivchari/containedctx v1.0.3 // indirect
|
|
||||||
github.com/sonatard/noctx v0.4.0 // indirect
|
|
||||||
github.com/sourcegraph/go-diff v0.7.0 // indirect
|
|
||||||
github.com/spf13/afero v1.15.0 // indirect
|
|
||||||
github.com/spf13/cast v1.5.1 // indirect
|
|
||||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
|
||||||
github.com/spf13/pflag v1.0.10 // indirect
|
github.com/spf13/pflag v1.0.10 // indirect
|
||||||
github.com/spf13/viper v1.12.0 // indirect
|
|
||||||
github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
|
|
||||||
github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
|
|
||||||
github.com/stretchr/objx v0.5.2 // indirect
|
|
||||||
github.com/stretchr/testify v1.11.1 // indirect
|
|
||||||
github.com/subosito/gotenv v1.4.1 // indirect
|
|
||||||
github.com/tetafro/godot v1.5.4 // indirect
|
|
||||||
github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
|
|
||||||
github.com/timonwong/loggercheck v0.11.0 // indirect
|
|
||||||
github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
|
|
||||||
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
|
|
||||||
github.com/ultraware/funlen v0.2.0 // indirect
|
|
||||||
github.com/ultraware/whitespace v0.2.0 // indirect
|
|
||||||
github.com/uudashr/gocognit v1.2.0 // indirect
|
|
||||||
github.com/uudashr/iface v1.4.1 // indirect
|
|
||||||
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
||||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||||
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
|
|
||||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
|
||||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
|
||||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
|
||||||
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
|
||||||
github.com/xen0n/gosmopolitan v1.3.0 // indirect
|
|
||||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
|
||||||
github.com/yagipy/maintidx v1.0.0 // indirect
|
|
||||||
github.com/yeya24/promlinter v0.3.0 // indirect
|
|
||||||
github.com/ykadowak/zerologlint v0.1.5 // indirect
|
|
||||||
github.com/yuin/goldmark v1.7.7 // indirect
|
|
||||||
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
|
||||||
github.com/zclconf/go-cty v1.17.0 // indirect
|
github.com/zclconf/go-cty v1.17.0 // indirect
|
||||||
gitlab.com/bosi/decorder v0.4.2 // indirect
|
|
||||||
go-simpler.org/musttag v0.14.0 // indirect
|
|
||||||
go-simpler.org/sloglint v0.11.1 // indirect
|
|
||||||
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
|
|
||||||
go.augendre.info/arangolint v0.4.0 // indirect
|
|
||||||
go.augendre.info/fatcontext v0.9.0 // indirect
|
|
||||||
go.uber.org/multierr v1.10.0 // indirect
|
|
||||||
go.uber.org/zap v1.27.0 // indirect
|
|
||||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
|
||||||
golang.org/x/crypto v0.48.0 // indirect
|
golang.org/x/crypto v0.48.0 // indirect
|
||||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
|
|
||||||
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
|
|
||||||
golang.org/x/mod v0.33.0 // indirect
|
golang.org/x/mod v0.33.0 // indirect
|
||||||
golang.org/x/net v0.50.0 // indirect
|
golang.org/x/net v0.50.0 // indirect
|
||||||
golang.org/x/sync v0.19.0 // indirect
|
golang.org/x/sync v0.19.0 // indirect
|
||||||
golang.org/x/sys v0.41.0 // indirect
|
golang.org/x/sys v0.41.0 // indirect
|
||||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 // indirect
|
|
||||||
golang.org/x/text v0.34.0 // indirect
|
golang.org/x/text v0.34.0 // indirect
|
||||||
|
golang.org/x/tools v0.42.0 // indirect
|
||||||
google.golang.org/appengine v1.6.8 // indirect
|
google.golang.org/appengine v1.6.8 // indirect
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
|
||||||
google.golang.org/grpc v1.79.1 // indirect
|
google.golang.org/grpc v1.79.1 // indirect
|
||||||
google.golang.org/protobuf v1.36.11 // indirect
|
google.golang.org/protobuf v1.36.11 // indirect
|
||||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
|
||||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
|
||||||
honnef.co/go/tools v0.7.0 // indirect
|
|
||||||
mvdan.cc/gofumpt v0.9.2 // indirect
|
|
||||||
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
tool golang.org/x/tools/cmd/goimports
|
||||||
|
|
|
||||||
|
|
@ -2,13 +2,6 @@
|
||||||
version: "2"
|
version: "2"
|
||||||
run:
|
run:
|
||||||
concurrency: 4
|
concurrency: 4
|
||||||
output:
|
|
||||||
formats:
|
|
||||||
text:
|
|
||||||
print-linter-name: true
|
|
||||||
print-issued-lines: true
|
|
||||||
colors: true
|
|
||||||
path: stdout
|
|
||||||
linters:
|
linters:
|
||||||
enable:
|
enable:
|
||||||
- bodyclose
|
- bodyclose
|
||||||
|
|
@ -75,10 +68,6 @@ linters:
|
||||||
- name: empty-lines
|
- name: empty-lines
|
||||||
- name: early-return
|
- name: early-return
|
||||||
exclusions:
|
exclusions:
|
||||||
paths:
|
|
||||||
- stackit-sdk-generator/
|
|
||||||
- generated/
|
|
||||||
- pkg_gen/
|
|
||||||
generated: lax
|
generated: lax
|
||||||
warn-unused: true
|
warn-unused: true
|
||||||
# Excluding configuration per-path, per-linter, per-text and per-source.
|
# Excluding configuration per-path, per-linter, per-text and per-source.
|
||||||
|
|
@ -87,6 +76,14 @@ linters:
|
||||||
- path: _test\.go
|
- path: _test\.go
|
||||||
linters:
|
linters:
|
||||||
- gochecknoinits
|
- gochecknoinits
|
||||||
|
paths:
|
||||||
|
- third_party/
|
||||||
|
- builtin/
|
||||||
|
- examples/
|
||||||
|
- tools/copy.go
|
||||||
|
- tools/main.go
|
||||||
|
- pkg_gen/
|
||||||
|
- cmd/
|
||||||
formatters:
|
formatters:
|
||||||
enable:
|
enable:
|
||||||
- gofmt
|
- gofmt
|
||||||
|
|
@ -95,3 +92,11 @@ formatters:
|
||||||
goimports:
|
goimports:
|
||||||
local-prefixes:
|
local-prefixes:
|
||||||
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||||
|
exclusions:
|
||||||
|
generated: lax
|
||||||
|
paths:
|
||||||
|
- third_party/
|
||||||
|
- builtin/
|
||||||
|
- examples/
|
||||||
|
- pkg_gen/
|
||||||
|
- cmd/
|
||||||
|
|
|
||||||
|
|
@ -20,20 +20,13 @@ func TestName() string {
|
||||||
}
|
}
|
||||||
|
|
||||||
func ActivateEnvironmentHttpMocks() {
|
func ActivateEnvironmentHttpMocks() {
|
||||||
httpmock.RegisterNoResponder(
|
httpmock.RegisterNoResponder(func(req *http.Request) (*http.Response, error) {
|
||||||
func(req *http.Request) (*http.Response, error) {
|
|
||||||
return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
|
return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
|
||||||
},
|
})
|
||||||
)
|
|
||||||
|
httpmock.RegisterRegexpResponder("GET", regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
|
||||||
|
func(req *http.Request) (*http.Response, error) {
|
||||||
|
return httpmock.NewStringResponse(http.StatusOK, httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String()), nil
|
||||||
|
})
|
||||||
|
|
||||||
httpmock.RegisterRegexpResponder(
|
|
||||||
"GET",
|
|
||||||
regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
|
|
||||||
func(_ *http.Request) (*http.Response, error) {
|
|
||||||
return httpmock.NewStringResponse(
|
|
||||||
http.StatusOK,
|
|
||||||
httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String(),
|
|
||||||
), nil
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
19
scripts/lint-golangci-lint.sh
Executable file
19
scripts/lint-golangci-lint.sh
Executable file
|
|
@ -0,0 +1,19 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# This script lints the SDK modules and the internal examples
|
||||||
|
# Pre-requisites: golangci-lint
|
||||||
|
set -eo pipefail
|
||||||
|
|
||||||
|
ROOT_DIR=$(git rev-parse --show-toplevel)
|
||||||
|
GOLANG_CI_YAML_PATH="${ROOT_DIR}/golang-ci.yaml"
|
||||||
|
GOLANG_CI_ARGS="--allow-parallel-runners --timeout=5m --config=${GOLANG_CI_YAML_PATH}"
|
||||||
|
|
||||||
|
if type -p golangci-lint >/dev/null; then
|
||||||
|
:
|
||||||
|
else
|
||||||
|
echo "golangci-lint not installed, unable to proceed."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd ${ROOT_DIR}
|
||||||
|
golangci-lint run ${GOLANG_CI_ARGS}
|
||||||
|
|
@ -17,7 +17,11 @@ elif [ "$action" = "tools" ]; then
|
||||||
|
|
||||||
go mod download
|
go mod download
|
||||||
|
|
||||||
go install golang.org/x/tools/cmd/goimports@v0.42.0
|
# go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.0
|
||||||
|
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
|
||||||
|
|
||||||
|
# go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.21.0
|
||||||
|
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
|
||||||
else
|
else
|
||||||
echo "Invalid action: '$action', please use $0 help for help"
|
echo "Invalid action: '$action', please use $0 help for help"
|
||||||
fi
|
fi
|
||||||
|
|
|
||||||
|
|
@ -14,5 +14,5 @@ fi
|
||||||
mkdir -p ${ROOT_DIR}/docs
|
mkdir -p ${ROOT_DIR}/docs
|
||||||
|
|
||||||
echo ">> Generating documentation"
|
echo ">> Generating documentation"
|
||||||
go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate \
|
tfplugindocs generate \
|
||||||
--provider-name "stackitprivatepreview"
|
--provider-name "stackitprivatepreview"
|
||||||
|
|
|
||||||
|
|
@ -32,7 +32,7 @@ const (
|
||||||
type EphemeralProviderData struct {
|
type EphemeralProviderData struct {
|
||||||
ProviderData
|
ProviderData
|
||||||
|
|
||||||
PrivateKey string //nolint:gosec //this is a placeholder and not used in this code
|
PrivateKey string
|
||||||
PrivateKeyPath string
|
PrivateKeyPath string
|
||||||
ServiceAccountKey string
|
ServiceAccountKey string
|
||||||
ServiceAccountKeyPath string
|
ServiceAccountKeyPath string
|
||||||
|
|
@ -105,13 +105,11 @@ func DiagsToError(diags diag.Diagnostics) error {
|
||||||
diagsError := diags.Errors()
|
diagsError := diags.Errors()
|
||||||
diagsStrings := make([]string, 0)
|
diagsStrings := make([]string, 0)
|
||||||
for _, diagnostic := range diagsError {
|
for _, diagnostic := range diagsError {
|
||||||
diagsStrings = append(
|
diagsStrings = append(diagsStrings, fmt.Sprintf(
|
||||||
diagsStrings, fmt.Sprintf(
|
|
||||||
"(%s) %s",
|
"(%s) %s",
|
||||||
diagnostic.Summary(),
|
diagnostic.Summary(),
|
||||||
diagnostic.Detail(),
|
diagnostic.Detail(),
|
||||||
),
|
))
|
||||||
)
|
|
||||||
}
|
}
|
||||||
return fmt.Errorf("%s", strings.Join(diagsStrings, ";"))
|
return fmt.Errorf("%s", strings.Join(diagsStrings, ";"))
|
||||||
}
|
}
|
||||||
|
|
@ -138,22 +136,14 @@ func LogAndAddWarning(ctx context.Context, diags *diag.Diagnostics, summary, det
|
||||||
|
|
||||||
func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
|
func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
|
||||||
warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name)
|
warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name)
|
||||||
warnContent := fmt.Sprintf(
|
warnContent := fmt.Sprintf("The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", resourceType, name)
|
||||||
"The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.",
|
|
||||||
resourceType,
|
|
||||||
name,
|
|
||||||
)
|
|
||||||
tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent))
|
tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent))
|
||||||
diags.AddWarning(warnTitle, warnContent)
|
diags.AddWarning(warnTitle, warnContent)
|
||||||
}
|
}
|
||||||
|
|
||||||
func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
|
func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
|
||||||
errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name)
|
errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name)
|
||||||
errContent := fmt.Sprintf(
|
errContent := fmt.Sprintf(`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, resourceType, name)
|
||||||
`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`,
|
|
||||||
resourceType,
|
|
||||||
name,
|
|
||||||
)
|
|
||||||
tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent))
|
tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent))
|
||||||
diags.AddError(errTitle, errContent)
|
diags.AddError(errTitle, errContent)
|
||||||
}
|
}
|
||||||
|
|
@ -171,10 +161,8 @@ func LogResponse(ctx context.Context) context.Context {
|
||||||
traceId := runtime.GetTraceId(ctx)
|
traceId := runtime.GetTraceId(ctx)
|
||||||
ctx = tflog.SetField(ctx, "x-trace-id", traceId)
|
ctx = tflog.SetField(ctx, "x-trace-id", traceId)
|
||||||
|
|
||||||
tflog.Info(
|
tflog.Info(ctx, "response data", map[string]interface{}{
|
||||||
ctx, "response data", map[string]interface{}{
|
|
||||||
"x-trace-id": traceId,
|
"x-trace-id": traceId,
|
||||||
},
|
})
|
||||||
)
|
|
||||||
return ctx
|
return ctx
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -98,7 +98,7 @@ func (rrt *RetryRoundTripper) retryLoop(
|
||||||
|
|
||||||
waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay)
|
waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay)
|
||||||
if err := rrt.waitForDelay(ctx, waitDuration); err != nil {
|
if err := rrt.waitForDelay(ctx, waitDuration); err != nil {
|
||||||
return nil, err // Context was canceled during wait.
|
return nil, err // Context was cancelled during wait.
|
||||||
}
|
}
|
||||||
|
|
||||||
// Exponential backoff for the next potential retry.
|
// Exponential backoff for the next potential retry.
|
||||||
|
|
@ -153,6 +153,7 @@ func (rrt *RetryRoundTripper) handleFinalError(
|
||||||
) error {
|
) error {
|
||||||
if resp != nil {
|
if resp != nil {
|
||||||
if err := resp.Body.Close(); err != nil {
|
if err := resp.Body.Close(); err != nil {
|
||||||
|
|
||||||
tflog.Warn(
|
tflog.Warn(
|
||||||
ctx, "Failed to close response body", map[string]interface{}{
|
ctx, "Failed to close response body", map[string]interface{}{
|
||||||
"error": err.Error(),
|
"error": err.Error(),
|
||||||
|
|
@ -193,6 +194,7 @@ func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
return false
|
return false
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// calculateWaitDurationWithJitter calculates the backoff duration for the next retry,
|
// calculateWaitDurationWithJitter calculates the backoff duration for the next retry,
|
||||||
|
|
@ -230,7 +232,7 @@ func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter(
|
||||||
func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error {
|
func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error {
|
||||||
select {
|
select {
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return fmt.Errorf("context canceled during backoff wait: %w", ctx.Err())
|
return fmt.Errorf("context cancelled during backoff wait: %w", ctx.Err())
|
||||||
case <-time.After(delay):
|
case <-time.After(delay):
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -72,7 +72,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
tripper := testRetryConfig(mock)
|
tripper := testRetryConfig(mock)
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
|
||||||
resp, err := tripper.RoundTrip(req)
|
resp, err := tripper.RoundTrip(req)
|
||||||
if resp != nil {
|
if resp != nil {
|
||||||
|
|
@ -110,7 +110,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
tripper := testRetryConfig(mock)
|
tripper := testRetryConfig(mock)
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
|
||||||
resp, err := tripper.RoundTrip(req)
|
resp, err := tripper.RoundTrip(req)
|
||||||
if resp != nil {
|
if resp != nil {
|
||||||
|
|
@ -155,7 +155,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
tripper := testRetryConfig(mock)
|
tripper := testRetryConfig(mock)
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
|
||||||
resp, err := tripper.RoundTrip(req)
|
resp, err := tripper.RoundTrip(req)
|
||||||
if resp != nil {
|
if resp != nil {
|
||||||
|
|
@ -185,12 +185,12 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
||||||
mockErr := errors.New("simulated network error")
|
mockErr := errors.New("simulated network error")
|
||||||
|
|
||||||
mock := &mockRoundTripper{
|
mock := &mockRoundTripper{
|
||||||
roundTripFunc: func(_ *http.Request) (*http.Response, error) {
|
roundTripFunc: func(req *http.Request) (*http.Response, error) {
|
||||||
return nil, mockErr
|
return nil, mockErr
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
tripper := testRetryConfig(mock)
|
tripper := testRetryConfig(mock)
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||||
|
|
||||||
resp, err := tripper.RoundTrip(req)
|
resp, err := tripper.RoundTrip(req)
|
||||||
if resp != nil {
|
if resp != nil {
|
||||||
|
|
@ -211,7 +211,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
||||||
)
|
)
|
||||||
|
|
||||||
t.Run(
|
t.Run(
|
||||||
"should abort retries if the main context is canceled", func(t *testing.T) {
|
"should abort retries if the main context is cancelled", func(t *testing.T) {
|
||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
mock := &mockRoundTripper{
|
mock := &mockRoundTripper{
|
||||||
|
|
@ -230,7 +230,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
||||||
ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond)
|
ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond)
|
||||||
defer cancel()
|
defer cancel()
|
||||||
|
|
||||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody).WithContext(ctx)
|
req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx)
|
||||||
|
|
||||||
resp, err := tripper.RoundTrip(req)
|
resp, err := tripper.RoundTrip(req)
|
||||||
if resp != nil {
|
if resp != nil {
|
||||||
|
|
|
||||||
|
|
@ -32,8 +32,6 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
||||||
},
|
},
|
||||||
"connection_info": schema.SingleNestedAttribute{
|
"connection_info": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
|
||||||
"write": schema.SingleNestedAttribute{
|
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"host": schema.StringAttribute{
|
"host": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
|
|
@ -46,24 +44,14 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The port of the instance.",
|
MarkdownDescription: "The port of the instance.",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
CustomType: WriteType{
|
|
||||||
ObjectType: types.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Computed: true,
|
|
||||||
Description: "The DNS name and port in the instance overview",
|
|
||||||
MarkdownDescription: "The DNS name and port in the instance overview",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
CustomType: ConnectionInfoType{
|
CustomType: ConnectionInfoType{
|
||||||
ObjectType: types.ObjectType{
|
ObjectType: types.ObjectType{
|
||||||
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
|
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The connection information of the instance",
|
Description: "The DNS name and port in the instance overview",
|
||||||
MarkdownDescription: "The connection information of the instance",
|
MarkdownDescription: "The DNS name and port in the instance overview",
|
||||||
},
|
},
|
||||||
"encryption": schema.SingleNestedAttribute{
|
"encryption": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
|
|
@ -255,22 +243,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
|
|
||||||
attributes := in.Attributes()
|
attributes := in.Attributes()
|
||||||
|
|
||||||
writeAttribute, ok := attributes["write"]
|
hostAttribute, ok := attributes["host"]
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Missing",
|
"Attribute Missing",
|
||||||
`write is missing from object`)
|
`host is missing from object`)
|
||||||
|
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||||
|
}
|
||||||
|
|
||||||
|
portAttribute, ok := attributes["port"]
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Missing",
|
||||||
|
`port is missing from object`)
|
||||||
|
|
||||||
|
return nil, diags
|
||||||
|
}
|
||||||
|
|
||||||
|
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Wrong Type",
|
||||||
|
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -278,7 +284,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
}
|
}
|
||||||
|
|
||||||
return ConnectionInfoValue{
|
return ConnectionInfoValue{
|
||||||
Write: writeVal,
|
Host: hostVal,
|
||||||
|
Port: portVal,
|
||||||
state: attr.ValueStateKnown,
|
state: attr.ValueStateKnown,
|
||||||
}, diags
|
}, diags
|
||||||
}
|
}
|
||||||
|
|
@ -346,22 +353,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
return NewConnectionInfoValueUnknown(), diags
|
return NewConnectionInfoValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
writeAttribute, ok := attributes["write"]
|
hostAttribute, ok := attributes["host"]
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Missing",
|
"Attribute Missing",
|
||||||
`write is missing from object`)
|
`host is missing from object`)
|
||||||
|
|
||||||
return NewConnectionInfoValueUnknown(), diags
|
return NewConnectionInfoValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||||
|
}
|
||||||
|
|
||||||
|
portAttribute, ok := attributes["port"]
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Missing",
|
||||||
|
`port is missing from object`)
|
||||||
|
|
||||||
|
return NewConnectionInfoValueUnknown(), diags
|
||||||
|
}
|
||||||
|
|
||||||
|
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Wrong Type",
|
||||||
|
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -369,7 +394,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
}
|
}
|
||||||
|
|
||||||
return ConnectionInfoValue{
|
return ConnectionInfoValue{
|
||||||
Write: writeVal,
|
Host: hostVal,
|
||||||
|
Port: portVal,
|
||||||
state: attr.ValueStateKnown,
|
state: attr.ValueStateKnown,
|
||||||
}, diags
|
}, diags
|
||||||
}
|
}
|
||||||
|
|
@ -442,401 +468,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
|
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
|
||||||
|
|
||||||
type ConnectionInfoValue struct {
|
type ConnectionInfoValue struct {
|
||||||
Write basetypes.ObjectValue `tfsdk:"write"`
|
|
||||||
state attr.ValueState
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
|
||||||
attrTypes := make(map[string]tftypes.Type, 1)
|
|
||||||
|
|
||||||
var val tftypes.Value
|
|
||||||
var err error
|
|
||||||
|
|
||||||
attrTypes["write"] = basetypes.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
}.TerraformType(ctx)
|
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
|
||||||
|
|
||||||
switch v.state {
|
|
||||||
case attr.ValueStateKnown:
|
|
||||||
vals := make(map[string]tftypes.Value, 1)
|
|
||||||
|
|
||||||
val, err = v.Write.ToTerraformValue(ctx)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
|
||||||
}
|
|
||||||
|
|
||||||
vals["write"] = val
|
|
||||||
|
|
||||||
if err := tftypes.ValidateValue(objectType, vals); err != nil {
|
|
||||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
|
||||||
}
|
|
||||||
|
|
||||||
return tftypes.NewValue(objectType, vals), nil
|
|
||||||
case attr.ValueStateNull:
|
|
||||||
return tftypes.NewValue(objectType, nil), nil
|
|
||||||
case attr.ValueStateUnknown:
|
|
||||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) IsNull() bool {
|
|
||||||
return v.state == attr.ValueStateNull
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) IsUnknown() bool {
|
|
||||||
return v.state == attr.ValueStateUnknown
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) String() string {
|
|
||||||
return "ConnectionInfoValue"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
|
||||||
var diags diag.Diagnostics
|
|
||||||
|
|
||||||
var write basetypes.ObjectValue
|
|
||||||
|
|
||||||
if v.Write.IsNull() {
|
|
||||||
write = types.ObjectNull(
|
|
||||||
WriteValue{}.AttributeTypes(ctx),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.Write.IsUnknown() {
|
|
||||||
write = types.ObjectUnknown(
|
|
||||||
WriteValue{}.AttributeTypes(ctx),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !v.Write.IsNull() && !v.Write.IsUnknown() {
|
|
||||||
write = types.ObjectValueMust(
|
|
||||||
WriteValue{}.AttributeTypes(ctx),
|
|
||||||
v.Write.Attributes(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
|
||||||
"write": basetypes.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.IsNull() {
|
|
||||||
return types.ObjectNull(attributeTypes), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.IsUnknown() {
|
|
||||||
return types.ObjectUnknown(attributeTypes), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
objVal, diags := types.ObjectValue(
|
|
||||||
attributeTypes,
|
|
||||||
map[string]attr.Value{
|
|
||||||
"write": write,
|
|
||||||
})
|
|
||||||
|
|
||||||
return objVal, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
|
||||||
other, ok := o.(ConnectionInfoValue)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.state != other.state {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.state != attr.ValueStateKnown {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
if !v.Write.Equal(other.Write) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
|
||||||
return ConnectionInfoType{
|
|
||||||
basetypes.ObjectType{
|
|
||||||
AttrTypes: v.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
|
||||||
return map[string]attr.Type{
|
|
||||||
"write": basetypes.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ basetypes.ObjectTypable = WriteType{}
|
|
||||||
|
|
||||||
type WriteType struct {
|
|
||||||
basetypes.ObjectType
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) Equal(o attr.Type) bool {
|
|
||||||
other, ok := o.(WriteType)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return t.ObjectType.Equal(other.ObjectType)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) String() string {
|
|
||||||
return "WriteType"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
|
|
||||||
var diags diag.Diagnostics
|
|
||||||
|
|
||||||
attributes := in.Attributes()
|
|
||||||
|
|
||||||
hostAttribute, ok := attributes["host"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`host is missing from object`)
|
|
||||||
|
|
||||||
return nil, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
portAttribute, ok := attributes["port"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`port is missing from object`)
|
|
||||||
|
|
||||||
return nil, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
return nil, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
return WriteValue{
|
|
||||||
Host: hostVal,
|
|
||||||
Port: portVal,
|
|
||||||
state: attr.ValueStateKnown,
|
|
||||||
}, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValueNull() WriteValue {
|
|
||||||
return WriteValue{
|
|
||||||
state: attr.ValueStateNull,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValueUnknown() WriteValue {
|
|
||||||
return WriteValue{
|
|
||||||
state: attr.ValueStateUnknown,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
|
|
||||||
var diags diag.Diagnostics
|
|
||||||
|
|
||||||
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
|
|
||||||
ctx := context.Background()
|
|
||||||
|
|
||||||
for name, attributeType := range attributeTypes {
|
|
||||||
attribute, ok := attributes[name]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Missing WriteValue Attribute Value",
|
|
||||||
"While creating a WriteValue value, a missing attribute value was detected. "+
|
|
||||||
"A WriteValue must contain values for all attributes, even if null or unknown. "+
|
|
||||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
|
||||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
|
|
||||||
)
|
|
||||||
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if !attributeType.Equal(attribute.Type(ctx)) {
|
|
||||||
diags.AddError(
|
|
||||||
"Invalid WriteValue Attribute Type",
|
|
||||||
"While creating a WriteValue value, an invalid attribute value was detected. "+
|
|
||||||
"A WriteValue must use a matching attribute type for the value. "+
|
|
||||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
|
||||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
|
|
||||||
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for name := range attributes {
|
|
||||||
_, ok := attributeTypes[name]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Extra WriteValue Attribute Value",
|
|
||||||
"While creating a WriteValue value, an extra attribute value was detected. "+
|
|
||||||
"A WriteValue must not contain values beyond the expected attribute types. "+
|
|
||||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
|
||||||
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
hostAttribute, ok := attributes["host"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`host is missing from object`)
|
|
||||||
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
portAttribute, ok := attributes["port"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`port is missing from object`)
|
|
||||||
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
return WriteValue{
|
|
||||||
Host: hostVal,
|
|
||||||
Port: portVal,
|
|
||||||
state: attr.ValueStateKnown,
|
|
||||||
}, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
|
|
||||||
object, diags := NewWriteValue(attributeTypes, attributes)
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
// This could potentially be added to the diag package.
|
|
||||||
diagsStrings := make([]string, 0, len(diags))
|
|
||||||
|
|
||||||
for _, diagnostic := range diags {
|
|
||||||
diagsStrings = append(diagsStrings, fmt.Sprintf(
|
|
||||||
"%s | %s | %s",
|
|
||||||
diagnostic.Severity(),
|
|
||||||
diagnostic.Summary(),
|
|
||||||
diagnostic.Detail()))
|
|
||||||
}
|
|
||||||
|
|
||||||
panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
|
|
||||||
}
|
|
||||||
|
|
||||||
return object
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
|
|
||||||
if in.Type() == nil {
|
|
||||||
return NewWriteValueNull(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if !in.Type().Equal(t.TerraformType(ctx)) {
|
|
||||||
return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
|
|
||||||
}
|
|
||||||
|
|
||||||
if !in.IsKnown() {
|
|
||||||
return NewWriteValueUnknown(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if in.IsNull() {
|
|
||||||
return NewWriteValueNull(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
attributes := map[string]attr.Value{}
|
|
||||||
|
|
||||||
val := map[string]tftypes.Value{}
|
|
||||||
|
|
||||||
err := in.As(&val)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for k, v := range val {
|
|
||||||
a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
attributes[k] = a
|
|
||||||
}
|
|
||||||
|
|
||||||
return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) ValueType(ctx context.Context) attr.Value {
|
|
||||||
return WriteValue{}
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ basetypes.ObjectValuable = WriteValue{}
|
|
||||||
|
|
||||||
type WriteValue struct {
|
|
||||||
Host basetypes.StringValue `tfsdk:"host"`
|
Host basetypes.StringValue `tfsdk:"host"`
|
||||||
Port basetypes.Int64Value `tfsdk:"port"`
|
Port basetypes.Int64Value `tfsdk:"port"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||||
attrTypes := make(map[string]tftypes.Type, 2)
|
attrTypes := make(map[string]tftypes.Type, 2)
|
||||||
|
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
|
|
@ -881,19 +518,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) IsNull() bool {
|
func (v ConnectionInfoValue) IsNull() bool {
|
||||||
return v.state == attr.ValueStateNull
|
return v.state == attr.ValueStateNull
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) IsUnknown() bool {
|
func (v ConnectionInfoValue) IsUnknown() bool {
|
||||||
return v.state == attr.ValueStateUnknown
|
return v.state == attr.ValueStateUnknown
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) String() string {
|
func (v ConnectionInfoValue) String() string {
|
||||||
return "WriteValue"
|
return "ConnectionInfoValue"
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||||
var diags diag.Diagnostics
|
var diags diag.Diagnostics
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
|
|
@ -919,8 +556,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
|
||||||
return objVal, diags
|
return objVal, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) Equal(o attr.Value) bool {
|
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
||||||
other, ok := o.(WriteValue)
|
other, ok := o.(ConnectionInfoValue)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
return false
|
return false
|
||||||
|
|
@ -945,15 +582,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) Type(ctx context.Context) attr.Type {
|
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
||||||
return WriteType{
|
return ConnectionInfoType{
|
||||||
basetypes.ObjectType{
|
basetypes.ObjectType{
|
||||||
AttrTypes: v.AttributeTypes(ctx),
|
AttrTypes: v.AttributeTypes(ctx),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"host": basetypes.StringType{},
|
"host": basetypes.StringType{},
|
||||||
"port": basetypes.Int64Type{},
|
"port": basetypes.Int64Type{},
|
||||||
|
|
|
||||||
|
|
@ -33,9 +33,9 @@ func mapGetInstanceResponseToModel(
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
|
isConnectionInfoIncomplete := resp.ConnectionInfo == nil ||
|
||||||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
|
resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" ||
|
||||||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
|
resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0
|
||||||
|
|
||||||
if isConnectionInfoIncomplete {
|
if isConnectionInfoIncomplete {
|
||||||
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
|
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
|
||||||
|
|
@ -43,17 +43,22 @@ func mapGetInstanceResponseToModel(
|
||||||
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
|
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
|
||||||
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"write": postgresflexalpharesource.NewWriteValueMust(
|
"host": types.StringPointerValue(resp.ConnectionInfo.Host),
|
||||||
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
|
"port": types.Int64PointerValue(resp.ConnectionInfo.Port),
|
||||||
map[string]attr.Value{
|
|
||||||
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
|
|
||||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
m.ConnectionInfo.Host = types.StringValue("")
|
||||||
|
if host, ok := resp.ConnectionInfo.GetHostOk(); ok {
|
||||||
|
m.ConnectionInfo.Host = types.StringValue(host)
|
||||||
|
}
|
||||||
|
|
||||||
|
m.ConnectionInfo.Port = types.Int64Value(0)
|
||||||
|
if port, ok := resp.ConnectionInfo.GetPortOk(); ok {
|
||||||
|
m.ConnectionInfo.Port = types.Int64Value(port)
|
||||||
|
}
|
||||||
|
|
||||||
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
||||||
if m.Id.IsNull() || m.Id.IsUnknown() {
|
if m.Id.IsNull() || m.Id.IsUnknown() {
|
||||||
m.Id = utils.BuildInternalTerraformId(
|
m.Id = utils.BuildInternalTerraformId(
|
||||||
|
|
@ -159,9 +164,9 @@ func mapGetDataInstanceResponseToModel(
|
||||||
}
|
}
|
||||||
|
|
||||||
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
||||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
|
isConnectionInfoIncomplete := resp.ConnectionInfo == nil ||
|
||||||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
|
resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" ||
|
||||||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
|
resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0
|
||||||
|
|
||||||
if isConnectionInfoIncomplete {
|
if isConnectionInfoIncomplete {
|
||||||
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
|
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
|
||||||
|
|
@ -169,13 +174,8 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
|
||||||
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
|
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
|
||||||
postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||||
map[string]attr.Value{
|
map[string]attr.Value{
|
||||||
"write": postgresflexalphadatasource.NewWriteValueMust(
|
"host": types.StringPointerValue(resp.ConnectionInfo.Host),
|
||||||
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
|
"port": types.Int64PointerValue(resp.ConnectionInfo.Port),
|
||||||
map[string]attr.Value{
|
|
||||||
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
|
|
||||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -328,6 +328,10 @@ func (r *instanceResource) Read(
|
||||||
|
|
||||||
ctx = core.InitProviderContext(ctx)
|
ctx = core.InitProviderContext(ctx)
|
||||||
|
|
||||||
|
// projectId := model.ProjectId.ValueString()
|
||||||
|
// region := r.providerData.GetRegionWithOverride(model.Region)
|
||||||
|
// instanceId := model.InstanceId.ValueString()
|
||||||
|
|
||||||
var projectId string
|
var projectId string
|
||||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||||
projectId = model.ProjectId.ValueString()
|
projectId = model.ProjectId.ValueString()
|
||||||
|
|
@ -431,6 +435,18 @@ func (r *instanceResource) Update(
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() {
|
||||||
|
// core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "instanceId is null or unknown")
|
||||||
|
// return
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
// if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() {
|
||||||
|
// core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "projectId is null or unknown")
|
||||||
|
// return
|
||||||
|
//}
|
||||||
|
|
||||||
|
// projectId := model.ProjectId.ValueString()
|
||||||
|
// instanceId := model.InstanceId.ValueString()
|
||||||
projectId := identityData.ProjectID.ValueString()
|
projectId := identityData.ProjectID.ValueString()
|
||||||
instanceId := identityData.InstanceID.ValueString()
|
instanceId := identityData.InstanceID.ValueString()
|
||||||
region := model.Region.ValueString()
|
region := model.Region.ValueString()
|
||||||
|
|
|
||||||
|
|
@ -34,8 +34,6 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
MarkdownDescription: "The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.",
|
||||||
},
|
},
|
||||||
"connection_info": schema.SingleNestedAttribute{
|
"connection_info": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
|
||||||
"write": schema.SingleNestedAttribute{
|
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
"host": schema.StringAttribute{
|
"host": schema.StringAttribute{
|
||||||
Computed: true,
|
Computed: true,
|
||||||
|
|
@ -48,24 +46,14 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
||||||
MarkdownDescription: "The port of the instance.",
|
MarkdownDescription: "The port of the instance.",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
CustomType: WriteType{
|
|
||||||
ObjectType: types.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Computed: true,
|
|
||||||
Description: "The DNS name and port in the instance overview",
|
|
||||||
MarkdownDescription: "The DNS name and port in the instance overview",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
CustomType: ConnectionInfoType{
|
CustomType: ConnectionInfoType{
|
||||||
ObjectType: types.ObjectType{
|
ObjectType: types.ObjectType{
|
||||||
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
|
AttrTypes: ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Computed: true,
|
Computed: true,
|
||||||
Description: "The connection information of the instance",
|
Description: "The DNS name and port in the instance overview",
|
||||||
MarkdownDescription: "The connection information of the instance",
|
MarkdownDescription: "The DNS name and port in the instance overview",
|
||||||
},
|
},
|
||||||
"encryption": schema.SingleNestedAttribute{
|
"encryption": schema.SingleNestedAttribute{
|
||||||
Attributes: map[string]schema.Attribute{
|
Attributes: map[string]schema.Attribute{
|
||||||
|
|
@ -275,22 +263,40 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
|
|
||||||
attributes := in.Attributes()
|
attributes := in.Attributes()
|
||||||
|
|
||||||
writeAttribute, ok := attributes["write"]
|
hostAttribute, ok := attributes["host"]
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Missing",
|
"Attribute Missing",
|
||||||
`write is missing from object`)
|
`host is missing from object`)
|
||||||
|
|
||||||
return nil, diags
|
return nil, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||||
|
}
|
||||||
|
|
||||||
|
portAttribute, ok := attributes["port"]
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Missing",
|
||||||
|
`port is missing from object`)
|
||||||
|
|
||||||
|
return nil, diags
|
||||||
|
}
|
||||||
|
|
||||||
|
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Wrong Type",
|
||||||
|
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -298,7 +304,8 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
||||||
}
|
}
|
||||||
|
|
||||||
return ConnectionInfoValue{
|
return ConnectionInfoValue{
|
||||||
Write: writeVal,
|
Host: hostVal,
|
||||||
|
Port: portVal,
|
||||||
state: attr.ValueStateKnown,
|
state: attr.ValueStateKnown,
|
||||||
}, diags
|
}, diags
|
||||||
}
|
}
|
||||||
|
|
@ -366,22 +373,40 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
return NewConnectionInfoValueUnknown(), diags
|
return NewConnectionInfoValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
writeAttribute, ok := attributes["write"]
|
hostAttribute, ok := attributes["host"]
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Missing",
|
"Attribute Missing",
|
||||||
`write is missing from object`)
|
`host is missing from object`)
|
||||||
|
|
||||||
return NewConnectionInfoValueUnknown(), diags
|
return NewConnectionInfoValueUnknown(), diags
|
||||||
}
|
}
|
||||||
|
|
||||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
diags.AddError(
|
diags.AddError(
|
||||||
"Attribute Wrong Type",
|
"Attribute Wrong Type",
|
||||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||||
|
}
|
||||||
|
|
||||||
|
portAttribute, ok := attributes["port"]
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Missing",
|
||||||
|
`port is missing from object`)
|
||||||
|
|
||||||
|
return NewConnectionInfoValueUnknown(), diags
|
||||||
|
}
|
||||||
|
|
||||||
|
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
diags.AddError(
|
||||||
|
"Attribute Wrong Type",
|
||||||
|
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diags.HasError() {
|
if diags.HasError() {
|
||||||
|
|
@ -389,7 +414,8 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
||||||
}
|
}
|
||||||
|
|
||||||
return ConnectionInfoValue{
|
return ConnectionInfoValue{
|
||||||
Write: writeVal,
|
Host: hostVal,
|
||||||
|
Port: portVal,
|
||||||
state: attr.ValueStateKnown,
|
state: attr.ValueStateKnown,
|
||||||
}, diags
|
}, diags
|
||||||
}
|
}
|
||||||
|
|
@ -462,401 +488,12 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
|
||||||
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
|
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
|
||||||
|
|
||||||
type ConnectionInfoValue struct {
|
type ConnectionInfoValue struct {
|
||||||
Write basetypes.ObjectValue `tfsdk:"write"`
|
|
||||||
state attr.ValueState
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
|
||||||
attrTypes := make(map[string]tftypes.Type, 1)
|
|
||||||
|
|
||||||
var val tftypes.Value
|
|
||||||
var err error
|
|
||||||
|
|
||||||
attrTypes["write"] = basetypes.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
}.TerraformType(ctx)
|
|
||||||
|
|
||||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
|
||||||
|
|
||||||
switch v.state {
|
|
||||||
case attr.ValueStateKnown:
|
|
||||||
vals := make(map[string]tftypes.Value, 1)
|
|
||||||
|
|
||||||
val, err = v.Write.ToTerraformValue(ctx)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
|
||||||
}
|
|
||||||
|
|
||||||
vals["write"] = val
|
|
||||||
|
|
||||||
if err := tftypes.ValidateValue(objectType, vals); err != nil {
|
|
||||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
|
||||||
}
|
|
||||||
|
|
||||||
return tftypes.NewValue(objectType, vals), nil
|
|
||||||
case attr.ValueStateNull:
|
|
||||||
return tftypes.NewValue(objectType, nil), nil
|
|
||||||
case attr.ValueStateUnknown:
|
|
||||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) IsNull() bool {
|
|
||||||
return v.state == attr.ValueStateNull
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) IsUnknown() bool {
|
|
||||||
return v.state == attr.ValueStateUnknown
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) String() string {
|
|
||||||
return "ConnectionInfoValue"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
|
||||||
var diags diag.Diagnostics
|
|
||||||
|
|
||||||
var write basetypes.ObjectValue
|
|
||||||
|
|
||||||
if v.Write.IsNull() {
|
|
||||||
write = types.ObjectNull(
|
|
||||||
WriteValue{}.AttributeTypes(ctx),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.Write.IsUnknown() {
|
|
||||||
write = types.ObjectUnknown(
|
|
||||||
WriteValue{}.AttributeTypes(ctx),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !v.Write.IsNull() && !v.Write.IsUnknown() {
|
|
||||||
write = types.ObjectValueMust(
|
|
||||||
WriteValue{}.AttributeTypes(ctx),
|
|
||||||
v.Write.Attributes(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
|
||||||
"write": basetypes.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.IsNull() {
|
|
||||||
return types.ObjectNull(attributeTypes), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.IsUnknown() {
|
|
||||||
return types.ObjectUnknown(attributeTypes), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
objVal, diags := types.ObjectValue(
|
|
||||||
attributeTypes,
|
|
||||||
map[string]attr.Value{
|
|
||||||
"write": write,
|
|
||||||
})
|
|
||||||
|
|
||||||
return objVal, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
|
||||||
other, ok := o.(ConnectionInfoValue)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.state != other.state {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.state != attr.ValueStateKnown {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
if !v.Write.Equal(other.Write) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
|
||||||
return ConnectionInfoType{
|
|
||||||
basetypes.ObjectType{
|
|
||||||
AttrTypes: v.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
|
||||||
return map[string]attr.Type{
|
|
||||||
"write": basetypes.ObjectType{
|
|
||||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ basetypes.ObjectTypable = WriteType{}
|
|
||||||
|
|
||||||
type WriteType struct {
|
|
||||||
basetypes.ObjectType
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) Equal(o attr.Type) bool {
|
|
||||||
other, ok := o.(WriteType)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return t.ObjectType.Equal(other.ObjectType)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) String() string {
|
|
||||||
return "WriteType"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
|
|
||||||
var diags diag.Diagnostics
|
|
||||||
|
|
||||||
attributes := in.Attributes()
|
|
||||||
|
|
||||||
hostAttribute, ok := attributes["host"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`host is missing from object`)
|
|
||||||
|
|
||||||
return nil, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
portAttribute, ok := attributes["port"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`port is missing from object`)
|
|
||||||
|
|
||||||
return nil, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
return nil, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
return WriteValue{
|
|
||||||
Host: hostVal,
|
|
||||||
Port: portVal,
|
|
||||||
state: attr.ValueStateKnown,
|
|
||||||
}, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValueNull() WriteValue {
|
|
||||||
return WriteValue{
|
|
||||||
state: attr.ValueStateNull,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValueUnknown() WriteValue {
|
|
||||||
return WriteValue{
|
|
||||||
state: attr.ValueStateUnknown,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
|
|
||||||
var diags diag.Diagnostics
|
|
||||||
|
|
||||||
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
|
|
||||||
ctx := context.Background()
|
|
||||||
|
|
||||||
for name, attributeType := range attributeTypes {
|
|
||||||
attribute, ok := attributes[name]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Missing WriteValue Attribute Value",
|
|
||||||
"While creating a WriteValue value, a missing attribute value was detected. "+
|
|
||||||
"A WriteValue must contain values for all attributes, even if null or unknown. "+
|
|
||||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
|
||||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
|
|
||||||
)
|
|
||||||
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if !attributeType.Equal(attribute.Type(ctx)) {
|
|
||||||
diags.AddError(
|
|
||||||
"Invalid WriteValue Attribute Type",
|
|
||||||
"While creating a WriteValue value, an invalid attribute value was detected. "+
|
|
||||||
"A WriteValue must use a matching attribute type for the value. "+
|
|
||||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
|
||||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
|
|
||||||
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for name := range attributes {
|
|
||||||
_, ok := attributeTypes[name]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Extra WriteValue Attribute Value",
|
|
||||||
"While creating a WriteValue value, an extra attribute value was detected. "+
|
|
||||||
"A WriteValue must not contain values beyond the expected attribute types. "+
|
|
||||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
|
||||||
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
hostAttribute, ok := attributes["host"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`host is missing from object`)
|
|
||||||
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
portAttribute, ok := attributes["port"]
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Missing",
|
|
||||||
`port is missing from object`)
|
|
||||||
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
diags.AddError(
|
|
||||||
"Attribute Wrong Type",
|
|
||||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
|
||||||
}
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
return NewWriteValueUnknown(), diags
|
|
||||||
}
|
|
||||||
|
|
||||||
return WriteValue{
|
|
||||||
Host: hostVal,
|
|
||||||
Port: portVal,
|
|
||||||
state: attr.ValueStateKnown,
|
|
||||||
}, diags
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
|
|
||||||
object, diags := NewWriteValue(attributeTypes, attributes)
|
|
||||||
|
|
||||||
if diags.HasError() {
|
|
||||||
// This could potentially be added to the diag package.
|
|
||||||
diagsStrings := make([]string, 0, len(diags))
|
|
||||||
|
|
||||||
for _, diagnostic := range diags {
|
|
||||||
diagsStrings = append(diagsStrings, fmt.Sprintf(
|
|
||||||
"%s | %s | %s",
|
|
||||||
diagnostic.Severity(),
|
|
||||||
diagnostic.Summary(),
|
|
||||||
diagnostic.Detail()))
|
|
||||||
}
|
|
||||||
|
|
||||||
panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
|
|
||||||
}
|
|
||||||
|
|
||||||
return object
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
|
|
||||||
if in.Type() == nil {
|
|
||||||
return NewWriteValueNull(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if !in.Type().Equal(t.TerraformType(ctx)) {
|
|
||||||
return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
|
|
||||||
}
|
|
||||||
|
|
||||||
if !in.IsKnown() {
|
|
||||||
return NewWriteValueUnknown(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if in.IsNull() {
|
|
||||||
return NewWriteValueNull(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
attributes := map[string]attr.Value{}
|
|
||||||
|
|
||||||
val := map[string]tftypes.Value{}
|
|
||||||
|
|
||||||
err := in.As(&val)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for k, v := range val {
|
|
||||||
a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
attributes[k] = a
|
|
||||||
}
|
|
||||||
|
|
||||||
return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t WriteType) ValueType(ctx context.Context) attr.Value {
|
|
||||||
return WriteValue{}
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ basetypes.ObjectValuable = WriteValue{}
|
|
||||||
|
|
||||||
type WriteValue struct {
|
|
||||||
Host basetypes.StringValue `tfsdk:"host"`
|
Host basetypes.StringValue `tfsdk:"host"`
|
||||||
Port basetypes.Int64Value `tfsdk:"port"`
|
Port basetypes.Int64Value `tfsdk:"port"`
|
||||||
state attr.ValueState
|
state attr.ValueState
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||||
attrTypes := make(map[string]tftypes.Type, 2)
|
attrTypes := make(map[string]tftypes.Type, 2)
|
||||||
|
|
||||||
var val tftypes.Value
|
var val tftypes.Value
|
||||||
|
|
@ -901,19 +538,19 @@ func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) IsNull() bool {
|
func (v ConnectionInfoValue) IsNull() bool {
|
||||||
return v.state == attr.ValueStateNull
|
return v.state == attr.ValueStateNull
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) IsUnknown() bool {
|
func (v ConnectionInfoValue) IsUnknown() bool {
|
||||||
return v.state == attr.ValueStateUnknown
|
return v.state == attr.ValueStateUnknown
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) String() string {
|
func (v ConnectionInfoValue) String() string {
|
||||||
return "WriteValue"
|
return "ConnectionInfoValue"
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||||
var diags diag.Diagnostics
|
var diags diag.Diagnostics
|
||||||
|
|
||||||
attributeTypes := map[string]attr.Type{
|
attributeTypes := map[string]attr.Type{
|
||||||
|
|
@ -939,8 +576,8 @@ func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, d
|
||||||
return objVal, diags
|
return objVal, diags
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) Equal(o attr.Value) bool {
|
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
||||||
other, ok := o.(WriteValue)
|
other, ok := o.(ConnectionInfoValue)
|
||||||
|
|
||||||
if !ok {
|
if !ok {
|
||||||
return false
|
return false
|
||||||
|
|
@ -965,15 +602,15 @@ func (v WriteValue) Equal(o attr.Value) bool {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) Type(ctx context.Context) attr.Type {
|
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
||||||
return WriteType{
|
return ConnectionInfoType{
|
||||||
basetypes.ObjectType{
|
basetypes.ObjectType{
|
||||||
AttrTypes: v.AttributeTypes(ctx),
|
AttrTypes: v.AttributeTypes(ctx),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||||
return map[string]attr.Type{
|
return map[string]attr.Type{
|
||||||
"host": basetypes.StringType{},
|
"host": basetypes.StringType{},
|
||||||
"port": basetypes.Int64Type{},
|
"port": basetypes.Int64Type{},
|
||||||
|
|
|
||||||
|
|
@ -30,10 +30,9 @@ var testInstances []string
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper")
|
sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper")
|
||||||
resource.AddTestSweepers(
|
resource.AddTestSweepers(sweeperName, &resource.Sweeper{
|
||||||
sweeperName, &resource.Sweeper{
|
|
||||||
Name: sweeperName,
|
Name: sweeperName,
|
||||||
F: func(_ string) error { // region is passed by the testing framework
|
F: func(region string) error {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
apiClientConfigOptions := []config.ConfigurationOption{}
|
apiClientConfigOptions := []config.ConfigurationOption{}
|
||||||
apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...)
|
apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...)
|
||||||
|
|
@ -52,12 +51,7 @@ func init() {
|
||||||
if strings.HasPrefix(inst.GetName(), "tf-acc-") {
|
if strings.HasPrefix(inst.GetName(), "tf-acc-") {
|
||||||
for _, item := range testInstances {
|
for _, item := range testInstances {
|
||||||
if inst.GetName() == item {
|
if inst.GetName() == item {
|
||||||
delErr := apiClient.DeleteInstanceRequestExecute(
|
delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId())
|
||||||
ctx,
|
|
||||||
testutils.ProjectId,
|
|
||||||
testutils.Region,
|
|
||||||
inst.GetId(),
|
|
||||||
)
|
|
||||||
if delErr != nil {
|
if delErr != nil {
|
||||||
// TODO: maybe just warn?
|
// TODO: maybe just warn?
|
||||||
log.Fatalln(delErr)
|
log.Fatalln(delErr)
|
||||||
|
|
@ -68,8 +62,7 @@ func init() {
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestInstanceResourceSchema(t *testing.T) {
|
func TestInstanceResourceSchema(t *testing.T) {
|
||||||
|
|
@ -202,8 +195,7 @@ func TestAccInstance(t *testing.T) {
|
||||||
updSizeData := exData
|
updSizeData := exData
|
||||||
updSizeData.Size = 25
|
updSizeData.Size = 25
|
||||||
|
|
||||||
resource.ParallelTest(
|
resource.ParallelTest(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
PreCheck: func() {
|
PreCheck: func() {
|
||||||
testAccPreCheck(t)
|
testAccPreCheck(t)
|
||||||
t.Logf(" ... working on instance %s", exData.TfName)
|
t.Logf(" ... working on instance %s", exData.TfName)
|
||||||
|
|
@ -218,11 +210,7 @@ func TestAccInstance(t *testing.T) {
|
||||||
exData,
|
exData,
|
||||||
),
|
),
|
||||||
Check: resource.ComposeAggregateTestCheckFunc(
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
resource.TestCheckResourceAttr(
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", exData.Name),
|
||||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
|
||||||
"name",
|
|
||||||
exData.Name,
|
|
||||||
),
|
|
||||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"),
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"),
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
|
|
@ -233,11 +221,7 @@ func TestAccInstance(t *testing.T) {
|
||||||
updNameData,
|
updNameData,
|
||||||
),
|
),
|
||||||
Check: resource.ComposeTestCheckFunc(
|
Check: resource.ComposeTestCheckFunc(
|
||||||
resource.TestCheckResourceAttr(
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", updNameData.Name),
|
||||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
|
||||||
"name",
|
|
||||||
updNameData.Name,
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
// Update size and verify
|
// Update size and verify
|
||||||
|
|
@ -261,8 +245,7 @@ func TestAccInstance(t *testing.T) {
|
||||||
// ImportStateVerify: true,
|
// ImportStateVerify: true,
|
||||||
// },
|
// },
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccInstanceWithUsers(t *testing.T) {
|
func TestAccInstanceWithUsers(t *testing.T) {
|
||||||
|
|
@ -277,8 +260,7 @@ func TestAccInstanceWithUsers(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
resource.ParallelTest(
|
resource.ParallelTest(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
PreCheck: func() {
|
PreCheck: func() {
|
||||||
testAccPreCheck(t)
|
testAccPreCheck(t)
|
||||||
t.Logf(" ... working on instance %s", data.TfName)
|
t.Logf(" ... working on instance %s", data.TfName)
|
||||||
|
|
@ -293,19 +275,14 @@ func TestAccInstanceWithUsers(t *testing.T) {
|
||||||
data,
|
data,
|
||||||
),
|
),
|
||||||
Check: resource.ComposeAggregateTestCheckFunc(
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
resource.TestCheckResourceAttr(
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name),
|
||||||
testutils.ResStr(pfx, "instance", data.TfName),
|
|
||||||
"name",
|
|
||||||
data.Name,
|
|
||||||
),
|
|
||||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccInstanceWithDatabases(t *testing.T) {
|
func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
|
|
@ -329,8 +306,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
resource.ParallelTest(
|
resource.ParallelTest(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
PreCheck: func() {
|
PreCheck: func() {
|
||||||
testAccPreCheck(t)
|
testAccPreCheck(t)
|
||||||
t.Logf(" ... working on instance %s", data.TfName)
|
t.Logf(" ... working on instance %s", data.TfName)
|
||||||
|
|
@ -345,11 +321,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
data,
|
data,
|
||||||
),
|
),
|
||||||
Check: resource.ComposeAggregateTestCheckFunc(
|
Check: resource.ComposeAggregateTestCheckFunc(
|
||||||
resource.TestCheckResourceAttr(
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name),
|
||||||
testutils.ResStr(pfx, "instance", data.TfName),
|
|
||||||
"name",
|
|
||||||
data.Name,
|
|
||||||
),
|
|
||||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||||
|
|
@ -359,8 +331,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// func setupMockServer() *httptest.Server {
|
// func setupMockServer() *httptest.Server {
|
||||||
|
|
@ -490,7 +461,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
// "project_id": testutils.ProjectId,
|
// "project_id": testutils.ProjectId,
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// func configResources(backupSchedule string, _ *string) string {
|
//func configResources(backupSchedule string, _ *string) string {
|
||||||
// return fmt.Sprintf(
|
// return fmt.Sprintf(
|
||||||
// `
|
// `
|
||||||
// %s
|
// %s
|
||||||
|
|
@ -564,7 +535,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
// )
|
// )
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// func TestAccPostgresFlexFlexResource(t *testing.T) {
|
//func TestAccPostgresFlexFlexResource(t *testing.T) {
|
||||||
// resource.ParallelTest(
|
// resource.ParallelTest(
|
||||||
// t, resource.TestCase{
|
// t, resource.TestCase{
|
||||||
// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
|
|
@ -983,7 +954,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
||||||
// )
|
// )
|
||||||
//}
|
//}
|
||||||
//
|
//
|
||||||
// func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
|
//func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
|
||||||
// ctx := context.Background()
|
// ctx := context.Background()
|
||||||
// var client *postgresflex.APIClient
|
// var client *postgresflex.APIClient
|
||||||
// var err error
|
// var err error
|
||||||
|
|
|
||||||
|
|
@ -252,6 +252,7 @@ func (r *userResource) Create(
|
||||||
model.UserId = types.Int64Value(id)
|
model.UserId = types.Int64Value(id)
|
||||||
model.Password = types.StringValue(userResp.GetPassword())
|
model.Password = types.StringValue(userResp.GetPassword())
|
||||||
model.Status = types.StringValue(userResp.GetStatus())
|
model.Status = types.StringValue(userResp.GetStatus())
|
||||||
|
//model.ConnectionString = types.StringValue(userResp.GetConnectionString())
|
||||||
|
|
||||||
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
||||||
ctx,
|
ctx,
|
||||||
|
|
|
||||||
|
|
@ -143,6 +143,7 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
|
||||||
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
||||||
|
|
||||||
tflog.Info(ctx, "SQL Server Flex beta database read")
|
tflog.Info(ctx, "SQL Server Flex beta database read")
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// handleReadError centralizes API error handling for the Read operation.
|
// handleReadError centralizes API error handling for the Read operation.
|
||||||
|
|
|
||||||
|
|
@ -36,6 +36,10 @@ var (
|
||||||
|
|
||||||
// Define errors
|
// Define errors
|
||||||
errDatabaseNotFound = errors.New("database not found")
|
errDatabaseNotFound = errors.New("database not found")
|
||||||
|
|
||||||
|
// Error message constants
|
||||||
|
extractErrorSummary = "extracting failed"
|
||||||
|
extractErrorMessage = "Extracting identity data: %v"
|
||||||
)
|
)
|
||||||
|
|
||||||
func NewDatabaseResource() resource.Resource {
|
func NewDatabaseResource() resource.Resource {
|
||||||
|
|
@ -182,6 +186,26 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
|
||||||
payLoad.Name = data.Name.ValueStringPointer()
|
payLoad.Name = data.Name.ValueStringPointer()
|
||||||
payLoad.Owner = data.Owner.ValueStringPointer()
|
payLoad.Owner = data.Owner.ValueStringPointer()
|
||||||
|
|
||||||
|
//_, err := wait.WaitForUserWaitHandler(
|
||||||
|
// ctx,
|
||||||
|
// r.client,
|
||||||
|
// projectId,
|
||||||
|
// instanceId,
|
||||||
|
// region,
|
||||||
|
// data.Owner.ValueString(),
|
||||||
|
//).
|
||||||
|
// SetSleepBeforeWait(10 * time.Second).
|
||||||
|
// WaitWithContext(ctx)
|
||||||
|
//if err != nil {
|
||||||
|
// core.LogAndAddError(
|
||||||
|
// ctx,
|
||||||
|
// &resp.Diagnostics,
|
||||||
|
// createErr,
|
||||||
|
// fmt.Sprintf("Calling API: %v", err),
|
||||||
|
// )
|
||||||
|
// return
|
||||||
|
//}
|
||||||
|
|
||||||
createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
|
createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
|
||||||
CreateDatabaseRequestPayload(payLoad).
|
CreateDatabaseRequestPayload(payLoad).
|
||||||
Execute()
|
Execute()
|
||||||
|
|
@ -427,9 +451,7 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
"Error deleting database",
|
"Error deleting database",
|
||||||
fmt.Sprintf(
|
fmt.Sprintf(
|
||||||
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
|
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId))
|
||||||
),
|
|
||||||
)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -446,6 +468,7 @@ func (r *databaseResource) ModifyPlan(
|
||||||
req resource.ModifyPlanRequest,
|
req resource.ModifyPlanRequest,
|
||||||
resp *resource.ModifyPlanResponse,
|
resp *resource.ModifyPlanResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
|
||||||
// skip initial empty configuration to avoid follow-up errors
|
// skip initial empty configuration to avoid follow-up errors
|
||||||
if req.Config.Raw.IsNull() {
|
if req.Config.Raw.IsNull() {
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -247,6 +247,7 @@ func toCreatePayload(
|
||||||
conversion.StringValueToPointer(model.Version),
|
conversion.StringValueToPointer(model.Version),
|
||||||
),
|
),
|
||||||
}, nil
|
}, nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func toUpdatePayload(
|
func toUpdatePayload(
|
||||||
|
|
|
||||||
|
|
@ -54,7 +54,7 @@ type InstanceResourceIdentityModel struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *instanceResource) Metadata(
|
func (r *instanceResource) Metadata(
|
||||||
_ context.Context,
|
ctx context.Context,
|
||||||
req resource.MetadataRequest,
|
req resource.MetadataRequest,
|
||||||
resp *resource.MetadataResponse,
|
resp *resource.MetadataResponse,
|
||||||
) {
|
) {
|
||||||
|
|
@ -64,7 +64,7 @@ func (r *instanceResource) Metadata(
|
||||||
//go:embed planModifiers.yaml
|
//go:embed planModifiers.yaml
|
||||||
var modifiersFileByte []byte
|
var modifiersFileByte []byte
|
||||||
|
|
||||||
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx)
|
s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx)
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
|
|
@ -147,6 +147,7 @@ func (r *instanceResource) ModifyPlan(
|
||||||
req resource.ModifyPlanRequest,
|
req resource.ModifyPlanRequest,
|
||||||
resp *resource.ModifyPlanResponse,
|
resp *resource.ModifyPlanResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
|
||||||
// skip initial empty configuration to avoid follow-up errors
|
// skip initial empty configuration to avoid follow-up errors
|
||||||
if req.Config.Raw.IsNull() {
|
if req.Config.Raw.IsNull() {
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -288,8 +288,8 @@ func TestAccInstanceNoEncryption(t *testing.T) {
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||||
|
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||||
|
|
@ -389,8 +389,8 @@ func TestAccInstanceEncryption(t *testing.T) {
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||||
|
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||||
|
|
|
||||||
|
|
@ -20,6 +20,8 @@ import (
|
||||||
|
|
||||||
var _ datasource.DataSource = (*userDataSource)(nil)
|
var _ datasource.DataSource = (*userDataSource)(nil)
|
||||||
|
|
||||||
|
const errorPrefix = "[sqlserverflexalpha - User]"
|
||||||
|
|
||||||
func NewUserDataSource() datasource.DataSource {
|
func NewUserDataSource() datasource.DataSource {
|
||||||
return &userDataSource{}
|
return &userDataSource{}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -20,6 +20,7 @@ import (
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
|
sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen"
|
||||||
sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
|
sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
|
||||||
sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
|
sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
|
||||||
|
|
||||||
|
|
@ -58,7 +59,7 @@ type userResource struct {
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||||
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
|
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -117,7 +118,7 @@ var modifiersFileByte []byte
|
||||||
|
|
||||||
// Schema defines the schema for the resource.
|
// Schema defines the schema for the resource.
|
||||||
func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
s := sqlserverflexalphaResGen.UserResourceSchema(ctx)
|
s := sqlserverflexalphagen.UserResourceSchema(ctx)
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -469,6 +470,7 @@ func (r *userResource) Delete(
|
||||||
// Delete existing record set
|
// Delete existing record set
|
||||||
_, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
|
_, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
// err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -143,6 +143,7 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
|
||||||
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
||||||
|
|
||||||
tflog.Info(ctx, "SQL Server Flex beta database read")
|
tflog.Info(ctx, "SQL Server Flex beta database read")
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// handleReadError centralizes API error handling for the Read operation.
|
// handleReadError centralizes API error handling for the Read operation.
|
||||||
|
|
|
||||||
|
|
@ -36,6 +36,10 @@ var (
|
||||||
|
|
||||||
// Define errors
|
// Define errors
|
||||||
errDatabaseNotFound = errors.New("database not found")
|
errDatabaseNotFound = errors.New("database not found")
|
||||||
|
|
||||||
|
// Error message constants
|
||||||
|
extractErrorSummary = "extracting failed"
|
||||||
|
extractErrorMessage = "Extracting identity data: %v"
|
||||||
)
|
)
|
||||||
|
|
||||||
func NewDatabaseResource() resource.Resource {
|
func NewDatabaseResource() resource.Resource {
|
||||||
|
|
@ -426,9 +430,7 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques
|
||||||
&resp.Diagnostics,
|
&resp.Diagnostics,
|
||||||
"Error deleting database",
|
"Error deleting database",
|
||||||
fmt.Sprintf(
|
fmt.Sprintf(
|
||||||
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
|
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId))
|
||||||
),
|
|
||||||
)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -447,6 +449,7 @@ func (r *databaseResource) ModifyPlan(
|
||||||
req resource.ModifyPlanRequest,
|
req resource.ModifyPlanRequest,
|
||||||
resp *resource.ModifyPlanResponse,
|
resp *resource.ModifyPlanResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
|
||||||
// skip initial empty configuration to avoid follow-up errors
|
// skip initial empty configuration to avoid follow-up errors
|
||||||
if req.Config.Raw.IsNull() {
|
if req.Config.Raw.IsNull() {
|
||||||
return
|
return
|
||||||
|
|
@ -556,4 +559,45 @@ func (r *databaseResource) ImportState(
|
||||||
tflog.Info(ctx, "Sqlserverflexbeta database state imported")
|
tflog.Info(ctx, "Sqlserverflexbeta database state imported")
|
||||||
}
|
}
|
||||||
|
|
||||||
// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity mode
|
// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
|
||||||
|
func (r *databaseResource) extractIdentityData(
|
||||||
|
model resourceModel,
|
||||||
|
identity DatabaseResourceIdentityModel,
|
||||||
|
) (projectId, region, instanceId, databaseName string, err error) {
|
||||||
|
if !model.Name.IsNull() && !model.Name.IsUnknown() {
|
||||||
|
databaseName = model.Name.ValueString()
|
||||||
|
} else {
|
||||||
|
if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() {
|
||||||
|
return "", "", "", "", fmt.Errorf("database_name not found in config")
|
||||||
|
}
|
||||||
|
databaseName = identity.DatabaseName.ValueString()
|
||||||
|
}
|
||||||
|
|
||||||
|
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||||
|
projectId = model.ProjectId.ValueString()
|
||||||
|
} else {
|
||||||
|
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
|
||||||
|
return "", "", "", "", fmt.Errorf("project_id not found in config")
|
||||||
|
}
|
||||||
|
projectId = identity.ProjectID.ValueString()
|
||||||
|
}
|
||||||
|
|
||||||
|
if !model.Region.IsNull() && !model.Region.IsUnknown() {
|
||||||
|
region = r.providerData.GetRegionWithOverride(model.Region)
|
||||||
|
} else {
|
||||||
|
if identity.Region.IsNull() || identity.Region.IsUnknown() {
|
||||||
|
return "", "", "", "", fmt.Errorf("region not found in config")
|
||||||
|
}
|
||||||
|
region = r.providerData.GetRegionWithOverride(identity.Region)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
||||||
|
instanceId = model.InstanceId.ValueString()
|
||||||
|
} else {
|
||||||
|
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
|
||||||
|
return "", "", "", "", fmt.Errorf("instance_id not found in config")
|
||||||
|
}
|
||||||
|
instanceId = identity.InstanceID.ValueString()
|
||||||
|
}
|
||||||
|
return projectId, region, instanceId, databaseName, nil
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -236,6 +236,7 @@ func toCreatePayload(
|
||||||
conversion.StringValueToPointer(model.Version),
|
conversion.StringValueToPointer(model.Version),
|
||||||
),
|
),
|
||||||
}, nil
|
}, nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func toUpdatePayload(
|
func toUpdatePayload(
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,6 @@ import (
|
||||||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
|
||||||
sqlserverflexbetaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
|
sqlserverflexbetaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
|
||||||
sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
|
sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
|
||||||
)
|
)
|
||||||
|
|
@ -29,13 +28,11 @@ func Test_handleDSEncryption(t *testing.T) {
|
||||||
// TODO: Add test cases.
|
// TODO: Add test cases.
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
tt.name, func(t *testing.T) {
|
if got := handleDSEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
||||||
if got := handleDSEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
|
||||||
t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want)
|
t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -89,13 +86,11 @@ func Test_handleEncryption(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
tt.name, func(t *testing.T) {
|
if got := handleEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
||||||
if got := handleEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
|
||||||
t.Errorf("handleEncryption() = %v, want %v", got, tt.want)
|
t.Errorf("handleEncryption() = %v, want %v", got, tt.want)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -114,18 +109,11 @@ func Test_mapDataResponseToModel(t *testing.T) {
|
||||||
// TODO: Add test cases.
|
// TODO: Add test cases.
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
tt.name, func(t *testing.T) {
|
if err := mapDataResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr {
|
||||||
if err := mapDataResponseToModel(
|
|
||||||
tt.args.ctx,
|
|
||||||
tt.args.resp,
|
|
||||||
tt.args.m,
|
|
||||||
tt.args.tfDiags,
|
|
||||||
); (err != nil) != tt.wantErr {
|
|
||||||
t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -144,18 +132,11 @@ func Test_mapResponseToModel(t *testing.T) {
|
||||||
// TODO: Add test cases.
|
// TODO: Add test cases.
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
tt.name, func(t *testing.T) {
|
if err := mapResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr {
|
||||||
if err := mapResponseToModel(
|
|
||||||
tt.args.ctx,
|
|
||||||
tt.args.resp,
|
|
||||||
tt.args.m,
|
|
||||||
tt.args.tfDiags,
|
|
||||||
); (err != nil) != tt.wantErr {
|
|
||||||
t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -227,8 +208,7 @@ func Test_toCreatePayload(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
tt.name, func(t *testing.T) {
|
|
||||||
got, err := toCreatePayload(tt.args.ctx, tt.args.model)
|
got, err := toCreatePayload(tt.args.ctx, tt.args.model)
|
||||||
if (err != nil) != tt.wantErr {
|
if (err != nil) != tt.wantErr {
|
||||||
t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
|
|
@ -237,8 +217,10 @@ func Test_toCreatePayload(t *testing.T) {
|
||||||
if diff := cmp.Diff(tt.want, got); diff != "" {
|
if diff := cmp.Diff(tt.want, got); diff != "" {
|
||||||
t.Errorf("model mismatch (-want +got):\n%s", diff)
|
t.Errorf("model mismatch (-want +got):\n%s", diff)
|
||||||
}
|
}
|
||||||
},
|
//if !reflect.DeepEqual(got, tt.want) {
|
||||||
)
|
// t.Errorf("toCreatePayload() got = %v, want %v", got, tt.want)
|
||||||
|
//}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -257,8 +239,7 @@ func Test_toUpdatePayload(t *testing.T) {
|
||||||
// TODO: Add test cases.
|
// TODO: Add test cases.
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
tt.name, func(t *testing.T) {
|
|
||||||
got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp)
|
got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp)
|
||||||
if (err != nil) != tt.wantErr {
|
if (err != nil) != tt.wantErr {
|
||||||
t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
||||||
|
|
@ -267,7 +248,6 @@ func Test_toUpdatePayload(t *testing.T) {
|
||||||
if !reflect.DeepEqual(got, tt.want) {
|
if !reflect.DeepEqual(got, tt.want) {
|
||||||
t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want)
|
t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want)
|
||||||
}
|
}
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -54,7 +54,7 @@ type InstanceResourceIdentityModel struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *instanceResource) Metadata(
|
func (r *instanceResource) Metadata(
|
||||||
_ context.Context,
|
ctx context.Context,
|
||||||
req resource.MetadataRequest,
|
req resource.MetadataRequest,
|
||||||
resp *resource.MetadataResponse,
|
resp *resource.MetadataResponse,
|
||||||
) {
|
) {
|
||||||
|
|
@ -64,7 +64,7 @@ func (r *instanceResource) Metadata(
|
||||||
//go:embed planModifiers.yaml
|
//go:embed planModifiers.yaml
|
||||||
var modifiersFileByte []byte
|
var modifiersFileByte []byte
|
||||||
|
|
||||||
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
|
s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
|
|
@ -147,6 +147,7 @@ func (r *instanceResource) ModifyPlan(
|
||||||
req resource.ModifyPlanRequest,
|
req resource.ModifyPlanRequest,
|
||||||
resp *resource.ModifyPlanResponse,
|
resp *resource.ModifyPlanResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
|
|
||||||
// skip initial empty configuration to avoid follow-up errors
|
// skip initial empty configuration to avoid follow-up errors
|
||||||
if req.Config.Raw.IsNull() {
|
if req.Config.Raw.IsNull() {
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -349,8 +349,8 @@ func TestAccInstanceNoEncryption(t *testing.T) {
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||||
|
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||||
|
|
@ -450,8 +450,8 @@ func TestAccInstanceEncryption(t *testing.T) {
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||||
|
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||||
|
|
|
||||||
|
|
@ -20,6 +20,8 @@ import (
|
||||||
|
|
||||||
var _ datasource.DataSource = (*userDataSource)(nil)
|
var _ datasource.DataSource = (*userDataSource)(nil)
|
||||||
|
|
||||||
|
const errorPrefix = "[Sqlserverflexbeta - User]"
|
||||||
|
|
||||||
func NewUserDataSource() datasource.DataSource {
|
func NewUserDataSource() datasource.DataSource {
|
||||||
return &userDataSource{}
|
return &userDataSource{}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -20,6 +20,7 @@ import (
|
||||||
|
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||||
|
sqlserverflexbetagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen"
|
||||||
sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
|
sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
|
||||||
sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
|
sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
|
||||||
|
|
||||||
|
|
@ -58,7 +59,7 @@ type userResource struct {
|
||||||
providerData core.ProviderData
|
providerData core.ProviderData
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||||
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
|
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -107,23 +108,23 @@ func (r *userResource) ModifyPlan(
|
||||||
}
|
}
|
||||||
|
|
||||||
//// TODO: verify if this is needed - START
|
//// TODO: verify if this is needed - START
|
||||||
// var planRoles []string
|
//var planRoles []string
|
||||||
// diags := planModel.Roles.ElementsAs(ctx, &planRoles, false)
|
//diags := planModel.Roles.ElementsAs(ctx, &planRoles, false)
|
||||||
// resp.Diagnostics.Append(diags...)
|
//resp.Diagnostics.Append(diags...)
|
||||||
// if diags.HasError() {
|
//if diags.HasError() {
|
||||||
// return
|
// return
|
||||||
//}
|
//}
|
||||||
// slices.Sort(planRoles)
|
//slices.Sort(planRoles)
|
||||||
// var roles []attr.Value
|
//var roles []attr.Value
|
||||||
// for _, role := range planRoles {
|
//for _, role := range planRoles {
|
||||||
// roles = append(roles, types.StringValue(string(role)))
|
// roles = append(roles, types.StringValue(string(role)))
|
||||||
//}
|
//}
|
||||||
// rolesSet, diags := types.ListValue(types.StringType, roles)
|
//rolesSet, diags := types.ListValue(types.StringType, roles)
|
||||||
// resp.Diagnostics.Append(diags...)
|
//resp.Diagnostics.Append(diags...)
|
||||||
// if diags.HasError() {
|
//if diags.HasError() {
|
||||||
// return
|
// return
|
||||||
//}
|
//}
|
||||||
// planModel.Roles = rolesSet
|
//planModel.Roles = rolesSet
|
||||||
//// TODO: verify if this is needed - END
|
//// TODO: verify if this is needed - END
|
||||||
|
|
||||||
resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
|
resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
|
||||||
|
|
@ -137,7 +138,7 @@ var modifiersFileByte []byte
|
||||||
|
|
||||||
// Schema defines the schema for the resource.
|
// Schema defines the schema for the resource.
|
||||||
func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||||
s := sqlserverflexbetaResGen.UserResourceSchema(ctx)
|
s := sqlserverflexbetagen.UserResourceSchema(ctx)
|
||||||
|
|
||||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -434,12 +435,7 @@ func (r *userResource) Update(
|
||||||
resp *resource.UpdateResponse,
|
resp *resource.UpdateResponse,
|
||||||
) { // nolint:gocritic // function signature required by Terraform
|
) { // nolint:gocritic // function signature required by Terraform
|
||||||
// Update shouldn't be called
|
// Update shouldn't be called
|
||||||
core.LogAndAddError(
|
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "an SQL server user can not be updated, only created")
|
||||||
ctx,
|
|
||||||
&resp.Diagnostics,
|
|
||||||
"Error updating user",
|
|
||||||
"an SQL server user can not be updated, only created",
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Delete deletes the resource and removes the Terraform state on success.
|
// Delete deletes the resource and removes the Terraform state on success.
|
||||||
|
|
@ -493,6 +489,7 @@ func (r *userResource) Delete(
|
||||||
// Delete existing record set
|
// Delete existing record set
|
||||||
_, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
|
_, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
|
||||||
WaitWithContext(ctx)
|
WaitWithContext(ctx)
|
||||||
|
// err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
|
core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
|
||||||
return
|
return
|
||||||
|
|
|
||||||
|
|
@ -108,7 +108,7 @@ func CreateInstanceWaitHandler(
|
||||||
)
|
)
|
||||||
if extendedTimeout < 3 {
|
if extendedTimeout < 3 {
|
||||||
maxWait += time.Minute * 5
|
maxWait += time.Minute * 5
|
||||||
extendedTimeout++
|
extendedTimeout = extendedTimeout + 1
|
||||||
if *s.Network.AccessScope == "SNA" {
|
if *s.Network.AccessScope == "SNA" {
|
||||||
ready := true
|
ready := true
|
||||||
if s.Network == nil || s.Network.InstanceAddress == nil {
|
if s.Network == nil || s.Network.InstanceAddress == nil {
|
||||||
|
|
@ -228,7 +228,7 @@ func GetUserByIdWaitHandler(
|
||||||
if userId > math.MaxInt32 {
|
if userId > math.MaxInt32 {
|
||||||
return false, nil, fmt.Errorf("userId value is too big for int32")
|
return false, nil, fmt.Errorf("userId value is too big for int32")
|
||||||
}
|
}
|
||||||
userId32 := int32(userId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32
|
userId32 := int32(userId)
|
||||||
s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId32)
|
s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId32)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
var oapiErr *oapierror.GenericOpenAPIError
|
var oapiErr *oapierror.GenericOpenAPIError
|
||||||
|
|
@ -239,11 +239,9 @@ func GetUserByIdWaitHandler(
|
||||||
switch oapiErr.StatusCode {
|
switch oapiErr.StatusCode {
|
||||||
case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
|
case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
|
||||||
case http.StatusNotFound:
|
case http.StatusNotFound:
|
||||||
tflog.Warn(
|
tflog.Warn(ctx, "api responded with status", map[string]interface{}{
|
||||||
ctx, "api responded with status", map[string]interface{}{
|
|
||||||
"status": oapiErr.StatusCode,
|
"status": oapiErr.StatusCode,
|
||||||
},
|
})
|
||||||
)
|
|
||||||
return false, nil, nil
|
return false, nil, nil
|
||||||
default:
|
default:
|
||||||
return false, nil, err
|
return false, nil, err
|
||||||
|
|
@ -264,7 +262,7 @@ func GetDatabaseByIdWaitHandler(
|
||||||
) *wait.AsyncActionHandler[postgresflex.GetDatabaseResponse] {
|
) *wait.AsyncActionHandler[postgresflex.GetDatabaseResponse] {
|
||||||
handler := wait.New(
|
handler := wait.New(
|
||||||
func() (waitFinished bool, response *postgresflex.GetDatabaseResponse, err error) {
|
func() (waitFinished bool, response *postgresflex.GetDatabaseResponse, err error) {
|
||||||
dbId32 := int32(databaseId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32
|
dbId32 := int32(databaseId)
|
||||||
s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, dbId32)
|
s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, dbId32)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
var oapiErr *oapierror.GenericOpenAPIError
|
var oapiErr *oapierror.GenericOpenAPIError
|
||||||
|
|
@ -274,18 +272,14 @@ func GetDatabaseByIdWaitHandler(
|
||||||
}
|
}
|
||||||
switch oapiErr.StatusCode {
|
switch oapiErr.StatusCode {
|
||||||
case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
|
case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
|
||||||
tflog.Warn(
|
tflog.Warn(ctx, "api responded with 50[2,3,4] status", map[string]interface{}{
|
||||||
ctx, "api responded with 50[2,3,4] status", map[string]interface{}{
|
|
||||||
"status": oapiErr.StatusCode,
|
"status": oapiErr.StatusCode,
|
||||||
},
|
})
|
||||||
)
|
|
||||||
return false, nil, nil
|
return false, nil, nil
|
||||||
case http.StatusNotFound:
|
case http.StatusNotFound:
|
||||||
tflog.Warn(
|
tflog.Warn(ctx, "api responded with 404 status", map[string]interface{}{
|
||||||
ctx, "api responded with 404 status", map[string]interface{}{
|
|
||||||
"status": oapiErr.StatusCode,
|
"status": oapiErr.StatusCode,
|
||||||
},
|
})
|
||||||
)
|
|
||||||
return false, nil, nil
|
return false, nil, nil
|
||||||
default:
|
default:
|
||||||
return false, nil, err
|
return false, nil, err
|
||||||
|
|
|
||||||
|
|
@ -54,12 +54,7 @@ type APIClientInterface interface {
|
||||||
instanceId string,
|
instanceId string,
|
||||||
) (*sqlserverflex.ListRolesResponse, error)
|
) (*sqlserverflex.ListRolesResponse, error)
|
||||||
|
|
||||||
ListUsersRequest(
|
ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest
|
||||||
ctx context.Context,
|
|
||||||
projectId string,
|
|
||||||
region string,
|
|
||||||
instanceId string,
|
|
||||||
) sqlserverflex.ApiListUsersRequestRequest
|
|
||||||
|
|
||||||
ListUsersRequestExecute(
|
ListUsersRequestExecute(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
|
@ -261,10 +256,7 @@ func CreateDatabaseWaitHandler(
|
||||||
var oapiErr *oapierror.GenericOpenAPIError
|
var oapiErr *oapierror.GenericOpenAPIError
|
||||||
ok := errors.As(err, &oapiErr)
|
ok := errors.As(err, &oapiErr)
|
||||||
if !ok {
|
if !ok {
|
||||||
return false, nil, fmt.Errorf(
|
return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||||
"get database - could not convert error to oapierror.GenericOpenAPIError: %s",
|
|
||||||
err.Error(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if oapiErr.StatusCode != http.StatusNotFound {
|
if oapiErr.StatusCode != http.StatusNotFound {
|
||||||
return false, nil, err
|
return false, nil, err
|
||||||
|
|
@ -326,10 +318,7 @@ func WaitForUserWaitHandler(
|
||||||
var oapiErr *oapierror.GenericOpenAPIError
|
var oapiErr *oapierror.GenericOpenAPIError
|
||||||
ok := errors.As(err, &oapiErr)
|
ok := errors.As(err, &oapiErr)
|
||||||
if !ok {
|
if !ok {
|
||||||
return false, nil, fmt.Errorf(
|
return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||||
"wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
|
|
||||||
err.Error(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if oapiErr.StatusCode != http.StatusNotFound {
|
if oapiErr.StatusCode != http.StatusNotFound {
|
||||||
return false, nil, err
|
return false, nil, err
|
||||||
|
|
|
||||||
|
|
@ -116,6 +116,7 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
func TestCreateInstanceWaitHandler(t *testing.T) {
|
func TestCreateInstanceWaitHandler(t *testing.T) {
|
||||||
|
//stateSuccess := utils.Ptr(InstanceStateSuccess)
|
||||||
instanceId := utils.Ptr("foo")
|
instanceId := utils.Ptr("foo")
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
desc string
|
desc string
|
||||||
|
|
@ -159,7 +160,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
|
||||||
// Storage: nil,
|
// Storage: nil,
|
||||||
// Version: nil,
|
// Version: nil,
|
||||||
// },
|
// },
|
||||||
// },
|
//},
|
||||||
{
|
{
|
||||||
desc: "create_failed",
|
desc: "create_failed",
|
||||||
instanceId: *instanceId,
|
instanceId: *instanceId,
|
||||||
|
|
|
||||||
|
|
@ -54,12 +54,7 @@ type APIClientInterface interface {
|
||||||
instanceId string,
|
instanceId string,
|
||||||
) (*sqlserverflex.ListRolesResponse, error)
|
) (*sqlserverflex.ListRolesResponse, error)
|
||||||
|
|
||||||
ListUsersRequest(
|
ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest
|
||||||
ctx context.Context,
|
|
||||||
projectId string,
|
|
||||||
region string,
|
|
||||||
instanceId string,
|
|
||||||
) sqlserverflex.ApiListUsersRequestRequest
|
|
||||||
|
|
||||||
ListUsersRequestExecute(
|
ListUsersRequestExecute(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
|
@ -167,17 +162,9 @@ func CreateInstanceWaitHandler(
|
||||||
}
|
}
|
||||||
return true, s, nil
|
return true, s, nil
|
||||||
case strings.ToLower(InstanceStateUnknown):
|
case strings.ToLower(InstanceStateUnknown):
|
||||||
return true, nil, fmt.Errorf(
|
return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateUnknown)
|
||||||
"create failed for instance %s with status %s",
|
|
||||||
instanceId,
|
|
||||||
InstanceStateUnknown,
|
|
||||||
)
|
|
||||||
case strings.ToLower(InstanceStateFailed):
|
case strings.ToLower(InstanceStateFailed):
|
||||||
return true, nil, fmt.Errorf(
|
return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateFailed)
|
||||||
"create failed for instance %s with status %s",
|
|
||||||
instanceId,
|
|
||||||
InstanceStateFailed,
|
|
||||||
)
|
|
||||||
case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
|
case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
|
||||||
tflog.Info(
|
tflog.Info(
|
||||||
ctx, "request is being handled", map[string]interface{}{
|
ctx, "request is being handled", map[string]interface{}{
|
||||||
|
|
@ -281,10 +268,7 @@ func CreateDatabaseWaitHandler(
|
||||||
var oapiErr *oapierror.GenericOpenAPIError
|
var oapiErr *oapierror.GenericOpenAPIError
|
||||||
ok := errors.As(err, &oapiErr)
|
ok := errors.As(err, &oapiErr)
|
||||||
if !ok {
|
if !ok {
|
||||||
return false, nil, fmt.Errorf(
|
return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||||
"get database - could not convert error to oapierror.GenericOpenAPIError: %s",
|
|
||||||
err.Error(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if oapiErr.StatusCode != http.StatusNotFound {
|
if oapiErr.StatusCode != http.StatusNotFound {
|
||||||
return false, nil, err
|
return false, nil, err
|
||||||
|
|
@ -346,10 +330,7 @@ func WaitForUserWaitHandler(
|
||||||
var oapiErr *oapierror.GenericOpenAPIError
|
var oapiErr *oapierror.GenericOpenAPIError
|
||||||
ok := errors.As(err, &oapiErr)
|
ok := errors.As(err, &oapiErr)
|
||||||
if !ok {
|
if !ok {
|
||||||
return false, nil, fmt.Errorf(
|
return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||||
"wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
|
|
||||||
err.Error(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if oapiErr.StatusCode != http.StatusNotFound {
|
if oapiErr.StatusCode != http.StatusNotFound {
|
||||||
return false, nil, err
|
return false, nil, err
|
||||||
|
|
|
||||||
|
|
@ -116,6 +116,7 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
func TestCreateInstanceWaitHandler(t *testing.T) {
|
func TestCreateInstanceWaitHandler(t *testing.T) {
|
||||||
|
//stateSuccess := utils.Ptr(InstanceStateSuccess)
|
||||||
instanceId := utils.Ptr("foo")
|
instanceId := utils.Ptr("foo")
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
desc string
|
desc string
|
||||||
|
|
@ -159,7 +160,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
|
||||||
// Storage: nil,
|
// Storage: nil,
|
||||||
// Version: nil,
|
// Version: nil,
|
||||||
// },
|
// },
|
||||||
// },
|
//},
|
||||||
{
|
{
|
||||||
desc: "create_failed",
|
desc: "create_failed",
|
||||||
instanceId: *instanceId,
|
instanceId: *instanceId,
|
||||||
|
|
|
||||||
|
|
@ -46,7 +46,6 @@ var (
|
||||||
_ provider.Provider = &Provider{}
|
_ provider.Provider = &Provider{}
|
||||||
)
|
)
|
||||||
|
|
||||||
//nolint:unused // These constants are defined for future use in retry logic for HTTP requests, which is not yet implemented.
|
|
||||||
const (
|
const (
|
||||||
// maxRetries is the maximum number of retries for a failed HTTP request.
|
// maxRetries is the maximum number of retries for a failed HTTP request.
|
||||||
maxRetries = 3
|
maxRetries = 3
|
||||||
|
|
@ -124,7 +123,6 @@ type providerModel struct {
|
||||||
|
|
||||||
// Schema defines the provider-level schema for configuration data.
|
// Schema defines the provider-level schema for configuration data.
|
||||||
func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {
|
func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {
|
||||||
//nolint:gosec // These are just descriptions, not actual credentials or sensitive information.
|
|
||||||
descriptions := map[string]string{
|
descriptions := map[string]string{
|
||||||
"credentials_path": "Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.",
|
"credentials_path": "Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.",
|
||||||
"service_account_token": "Token used for authentication. If set, the token flow will be used to authenticate all operations.",
|
"service_account_token": "Token used for authentication. If set, the token flow will be used to authenticate all operations.",
|
||||||
|
|
@ -491,8 +489,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest,
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
//nolint:gocritic // maybe later in the code
|
//roundTripper := core.NewRetryRoundTripper(
|
||||||
// roundTripper := core.NewRetryRoundTripper(
|
|
||||||
// baseRoundTripper,
|
// baseRoundTripper,
|
||||||
// maxRetries,
|
// maxRetries,
|
||||||
// initialDelay,
|
// initialDelay,
|
||||||
|
|
|
||||||
|
|
@ -12,12 +12,11 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/golang-jwt/jwt/v5"
|
"github.com/golang-jwt/jwt/v5"
|
||||||
"github.com/google/go-cmp/cmp"
|
test "github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
||||||
test "github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing
|
|
||||||
"github.com/jarcoal/httpmock"
|
"github.com/jarcoal/httpmock"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/clients"
|
"github.com/stackitcloud/stackit-sdk-go/core/clients"
|
||||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
||||||
postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor"
|
postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor"
|
||||||
|
|
||||||
|
|
@ -41,7 +40,7 @@ import (
|
||||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
|
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
|
||||||
|
|
||||||
"github.com/hashicorp/terraform-plugin-testing/config"
|
"github.com/hashicorp/terraform-plugin-testing/config"
|
||||||
"github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing
|
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
||||||
)
|
)
|
||||||
|
|
||||||
//go:embed testdata/provider-credentials.tf
|
//go:embed testdata/provider-credentials.tf
|
||||||
|
|
@ -68,15 +67,12 @@ func TestMshTest(t *testing.T) {
|
||||||
|
|
||||||
testutils.ActivateEnvironmentHttpMocks()
|
testutils.ActivateEnvironmentHttpMocks()
|
||||||
|
|
||||||
httpmock.RegisterResponder(
|
httpmock.RegisterResponder("POST", `https://service-account.api.stackit.cloud/token`,
|
||||||
"POST", `https://service-account.api.stackit.cloud/token`,
|
func(req *http.Request) (*http.Response, error) {
|
||||||
func(_ *http.Request) (*http.Response, error) {
|
token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
|
||||||
token := jwt.NewWithClaims(
|
|
||||||
jwt.SigningMethodHS256, jwt.MapClaims{
|
|
||||||
"foo": "bar",
|
"foo": "bar",
|
||||||
"nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(),
|
"nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(),
|
||||||
},
|
})
|
||||||
)
|
|
||||||
// Sign and get the complete encoded token as a string using the secret
|
// Sign and get the complete encoded token as a string using the secret
|
||||||
tokenString, err := token.SignedString([]byte("mySecret"))
|
tokenString, err := token.SignedString([]byte("mySecret"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -92,13 +88,10 @@ func TestMshTest(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
return httpmock.NewJsonResponse(http.StatusOK, tR)
|
return httpmock.NewJsonResponse(http.StatusOK, tR)
|
||||||
},
|
})
|
||||||
)
|
|
||||||
|
|
||||||
httpmock.RegisterResponder(
|
httpmock.RegisterResponder("GET", `https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`,
|
||||||
"GET",
|
func(req *http.Request) (*http.Response, error) {
|
||||||
`https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`,
|
|
||||||
func(_ *http.Request) (*http.Response, error) {
|
|
||||||
res := postgresflexalpha.GetFlavorsResponse{
|
res := postgresflexalpha.GetFlavorsResponse{
|
||||||
Flavors: &[]postgresflexalpha.ListFlavors{
|
Flavors: &[]postgresflexalpha.ListFlavors{
|
||||||
{
|
{
|
||||||
|
|
@ -127,8 +120,7 @@ func TestMshTest(t *testing.T) {
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
test.Test(
|
test.Test(t, test.TestCase{
|
||||||
t, test.TestCase{
|
|
||||||
IsUnitTest: true,
|
IsUnitTest: true,
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []test.TestStep{
|
Steps: []test.TestStep{
|
||||||
|
|
@ -136,8 +128,7 @@ func TestMshTest(t *testing.T) {
|
||||||
ConfigVariables: map[string]config.Variable{
|
ConfigVariables: map[string]config.Variable{
|
||||||
"project_id": config.StringVariable("xyz-project-id"),
|
"project_id": config.StringVariable("xyz-project-id"),
|
||||||
},
|
},
|
||||||
Config: fmt.Sprintf(
|
Config: fmt.Sprintf(`
|
||||||
`
|
|
||||||
provider "stackitprivatepreview" {
|
provider "stackitprivatepreview" {
|
||||||
default_region = "%[1]s"
|
default_region = "%[1]s"
|
||||||
service_account_key_path = "%[2]s"
|
service_account_key_path = "%[2]s"
|
||||||
|
|
@ -158,8 +149,7 @@ func TestMshTest(t *testing.T) {
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
|
func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
|
||||||
|
|
@ -181,24 +171,12 @@ func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
|
||||||
sqlserverFlexBetaUser.NewUserDataSource(),
|
sqlserverFlexBetaUser.NewUserDataSource(),
|
||||||
sqlserverFlexBetaFlavor.NewFlavorDataSource(),
|
sqlserverFlexBetaFlavor.NewFlavorDataSource(),
|
||||||
}
|
}
|
||||||
provider, ok := stackit.New("testing")().(*stackit.Provider)
|
datasources := stackit.New("testing")().(*stackit.Provider).DataSources(context.Background())
|
||||||
if !ok {
|
|
||||||
t.Fatal("could not assert provider type")
|
|
||||||
}
|
|
||||||
datasources := provider.DataSources(context.Background())
|
|
||||||
|
|
||||||
expectedMap := map[string]struct{}{}
|
if !reflect.DeepEqual(len(expectedDataSources), len(datasources)) {
|
||||||
for _, d := range expectedDataSources {
|
|
||||||
expectedMap[reflect.TypeOf(d).String()] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
actualMap := map[string]struct{}{}
|
|
||||||
for _, d := range datasources {
|
for _, d := range datasources {
|
||||||
actualMap[reflect.TypeOf(d()).String()] = struct{}{}
|
require.Containsf(t, expectedDataSources, d(), "Data source %+v was not expected", reflect.TypeOf(d()))
|
||||||
}
|
}
|
||||||
|
|
||||||
if diff := cmp.Diff(expectedMap, actualMap); diff != "" {
|
|
||||||
t.Errorf("DataSources mismatch (-expected +actual):\n%s", diff)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -216,24 +194,12 @@ func TestUnitProviderHasChildResources_Basic(t *testing.T) {
|
||||||
sqlserverFlexBetaUser.NewUserResource(),
|
sqlserverFlexBetaUser.NewUserResource(),
|
||||||
sqlserverflexBetaDatabase.NewDatabaseResource(),
|
sqlserverflexBetaDatabase.NewDatabaseResource(),
|
||||||
}
|
}
|
||||||
provider, ok := stackit.New("testing")().(*stackit.Provider)
|
resources := stackit.New("testing")().(*stackit.Provider).Resources(context.Background())
|
||||||
if !ok {
|
|
||||||
t.Fatal("could not assert provider type")
|
|
||||||
}
|
|
||||||
resources := provider.Resources(context.Background())
|
|
||||||
|
|
||||||
expectedMap := map[string]struct{}{}
|
if !reflect.DeepEqual(len(expectedResources), len(resources)) {
|
||||||
for _, r := range expectedResources {
|
for _, d := range resources {
|
||||||
expectedMap[reflect.TypeOf(r).String()] = struct{}{}
|
require.Containsf(t, expectedResources, d(), "Resource %+v was not expected", reflect.TypeOf(d()))
|
||||||
}
|
}
|
||||||
|
|
||||||
actualMap := map[string]struct{}{}
|
|
||||||
for _, r := range resources {
|
|
||||||
actualMap[reflect.TypeOf(r()).String()] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
if diff := cmp.Diff(expectedMap, actualMap); diff != "" {
|
|
||||||
t.Errorf("Resources mismatch (-expected +actual):\n%s", diff)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -243,15 +209,14 @@ func TestAccEnvVarServiceAccountPathValid(t *testing.T) {
|
||||||
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
||||||
t.Skipf(
|
t.Skipf(
|
||||||
"Acceptance tests skipped unless env '%s' set",
|
"Acceptance tests skipped unless env '%s' set",
|
||||||
resource.EnvTfAcc,
|
resource.EnvTfAcc)
|
||||||
)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||||
resource.Test(
|
resource.Test(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
{
|
{
|
||||||
|
|
@ -260,8 +225,7 @@ func TestAccEnvVarServiceAccountPathValid(t *testing.T) {
|
||||||
Config: providerCredentialConfig,
|
Config: providerCredentialConfig,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
|
func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
|
||||||
|
|
@ -269,8 +233,7 @@ func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
|
||||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||||
tempHomeFolder := testutils.CreateTemporaryHome(false, t)
|
tempHomeFolder := testutils.CreateTemporaryHome(false, t)
|
||||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||||
resource.Test(
|
resource.Test(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
{
|
{
|
||||||
|
|
@ -280,8 +243,7 @@ func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
|
||||||
ExpectError: regexp.MustCompile(`undefined response type, status code 401`),
|
ExpectError: regexp.MustCompile(`undefined response type, status code 401`),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccCredentialsFileValid(t *testing.T) {
|
func TestAccCredentialsFileValid(t *testing.T) {
|
||||||
|
|
@ -289,8 +251,7 @@ func TestAccCredentialsFileValid(t *testing.T) {
|
||||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||||
resource.Test(
|
resource.Test(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
{
|
{
|
||||||
|
|
@ -299,8 +260,7 @@ func TestAccCredentialsFileValid(t *testing.T) {
|
||||||
Config: providerCredentialConfig,
|
Config: providerCredentialConfig,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccCredentialsFileInvalid(t *testing.T) {
|
func TestAccCredentialsFileInvalid(t *testing.T) {
|
||||||
|
|
@ -308,8 +268,7 @@ func TestAccCredentialsFileInvalid(t *testing.T) {
|
||||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||||
tempHomeFolder := testutils.CreateTemporaryHome(false, t)
|
tempHomeFolder := testutils.CreateTemporaryHome(false, t)
|
||||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||||
resource.Test(
|
resource.Test(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
{
|
{
|
||||||
|
|
@ -319,8 +278,7 @@ func TestAccCredentialsFileInvalid(t *testing.T) {
|
||||||
ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`),
|
ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccProviderConfigureValidValues(t *testing.T) {
|
func TestAccProviderConfigureValidValues(t *testing.T) {
|
||||||
|
|
@ -329,25 +287,21 @@ func TestAccProviderConfigureValidValues(t *testing.T) {
|
||||||
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
||||||
t.Skipf(
|
t.Skipf(
|
||||||
"Acceptance tests skipped unless env '%s' set",
|
"Acceptance tests skipped unless env '%s' set",
|
||||||
resource.EnvTfAcc,
|
resource.EnvTfAcc)
|
||||||
)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||||
resource.Test(
|
resource.Test(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
{
|
{ // valid provider attributes
|
||||||
// valid provider attributes
|
|
||||||
ConfigVariables: testConfigProviderCredentials,
|
ConfigVariables: testConfigProviderCredentials,
|
||||||
Config: providerValidAttributes,
|
Config: providerValidAttributes,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAccProviderConfigureAnInvalidValue(t *testing.T) {
|
func TestAccProviderConfigureAnInvalidValue(t *testing.T) {
|
||||||
|
|
@ -356,25 +310,21 @@ func TestAccProviderConfigureAnInvalidValue(t *testing.T) {
|
||||||
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
||||||
t.Skipf(
|
t.Skipf(
|
||||||
"Acceptance tests skipped unless env '%s' set",
|
"Acceptance tests skipped unless env '%s' set",
|
||||||
resource.EnvTfAcc,
|
resource.EnvTfAcc)
|
||||||
)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||||
resource.Test(
|
resource.Test(t, resource.TestCase{
|
||||||
t, resource.TestCase{
|
|
||||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||||
Steps: []resource.TestStep{
|
Steps: []resource.TestStep{
|
||||||
{
|
{ // invalid test attribute should throw an error
|
||||||
// invalid test attribute should throw an error
|
|
||||||
ConfigVariables: testConfigProviderCredentials,
|
ConfigVariables: testConfigProviderCredentials,
|
||||||
Config: providerInvalidAttribute,
|
Config: providerInvalidAttribute,
|
||||||
ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`),
|
ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
})
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,3 @@
|
||||||
//go:build tools
|
|
||||||
|
|
||||||
package tools
|
package tools
|
||||||
|
|
||||||
// Format Terraform code for use in documentation.
|
// Format Terraform code for use in documentation.
|
||||||
|
|
@ -9,11 +7,3 @@ package tools
|
||||||
|
|
||||||
// Generate documentation.
|
// Generate documentation.
|
||||||
//go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-dir .. -provider-name stackitprivatepreview
|
//go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-dir .. -provider-name stackitprivatepreview
|
||||||
|
|
||||||
import (
|
|
||||||
_ "github.com/golangci/golangci-lint/v2/cmd/golangci-lint"
|
|
||||||
_ "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework"
|
|
||||||
_ "github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi"
|
|
||||||
_ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs"
|
|
||||||
_ "golang.org/x/tools/cmd/goimports"
|
|
||||||
)
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue