fix: linting (#77)
## Description
<!-- **Please link some issue here describing what you are trying to achieve.**
In case there is no issue present for your PR, please consider creating one.
At least please give us some description what you are trying to achieve and why your change is needed. -->
relates to #1234
## Checklist
- [ ] Issue was linked above
- [ ] Code format was applied: `make fmt`
- [ ] Examples were added / adjusted (see `examples/` directory)
- [x] Docs are up-to-date: `make generate-docs` (will be checked by CI)
- [ ] Unit tests got implemented or updated
- [ ] Acceptance tests got implemented or updated (see e.g. [here](f5f99d1709/stackit/internal/services/dns/dns_acc_test.go))
- [x] Unit tests are passing: `make test` (will be checked by CI)
- [x] No linter issues: `make lint` (will be checked by CI)
Reviewed-on: #77
Co-authored-by: Andre Harms <andre.harms@stackit.cloud>
Co-committed-by: Andre Harms <andre.harms@stackit.cloud>
This commit is contained in:
parent
36eccc52c3
commit
4a2819787d
74 changed files with 3010 additions and 2432 deletions
6
Makefile
6
Makefile
|
|
@ -12,9 +12,10 @@ project-tools:
|
|||
# LINT
|
||||
lint-golangci-lint:
|
||||
@echo "Linting with golangci-lint"
|
||||
@$(SCRIPTS_BASE)/lint-golangci-lint.sh
|
||||
@go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint run --fix --config golang-ci.yaml
|
||||
|
||||
lint-tf:
|
||||
|
||||
lint-tf:
|
||||
@echo "Linting terraform files"
|
||||
@terraform fmt -check -diff -recursive
|
||||
|
||||
|
|
@ -23,6 +24,7 @@ lint: lint-golangci-lint lint-tf
|
|||
# DOCUMENTATION GENERATION
|
||||
generate-docs:
|
||||
@echo "Generating documentation with tfplugindocs"
|
||||
|
||||
@$(SCRIPTS_BASE)/tfplugindocs.sh
|
||||
|
||||
build:
|
||||
|
|
|
|||
|
|
@ -60,7 +60,7 @@ func (b *Builder) Build() error {
|
|||
|
||||
if !b.PackagesOnly {
|
||||
slog.Info(" ... Checking needed commands available")
|
||||
err := checkCommands([]string{"tfplugingen-framework", "tfplugingen-openapi"})
|
||||
err := checkCommands([]string{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -111,7 +111,7 @@ func (b *Builder) Build() error {
|
|||
}
|
||||
|
||||
slog.Info("Creating OAS dir")
|
||||
err = os.MkdirAll(path.Join(genDir, "oas"), 0755)
|
||||
err = os.MkdirAll(path.Join(genDir, "oas"), 0o755) //nolint:gosec // this dir is not sensitive, so we can use 0755
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -158,7 +158,17 @@ func (b *Builder) Build() error {
|
|||
if err = cmd.Wait(); err != nil {
|
||||
var exitErr *exec.ExitError
|
||||
if errors.As(err, &exitErr) {
|
||||
slog.Error("cmd.Wait", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"cmd.Wait",
|
||||
"code",
|
||||
exitErr.ExitCode(),
|
||||
"error",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return fmt.Errorf("%s", stdErr.String())
|
||||
}
|
||||
if err != nil {
|
||||
|
|
@ -192,7 +202,11 @@ func (b *Builder) Build() error {
|
|||
}
|
||||
|
||||
slog.Info("Rearranging package directories")
|
||||
err = os.MkdirAll(path.Join(*root, "pkg_gen"), 0755) // noqa:gosec
|
||||
//nolint:gosec // this dir is not sensitive, so we can use 0755
|
||||
err = os.MkdirAll(
|
||||
path.Join(*root, "pkg_gen"),
|
||||
0o755,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -202,20 +216,21 @@ func (b *Builder) Build() error {
|
|||
return err
|
||||
}
|
||||
for _, item := range items {
|
||||
if item.IsDir() {
|
||||
slog.Info(" -> package", "name", item.Name())
|
||||
tgtDir := path.Join(*root, "pkg_gen", item.Name())
|
||||
if fileExists(tgtDir) {
|
||||
delErr := os.RemoveAll(tgtDir)
|
||||
if delErr != nil {
|
||||
return delErr
|
||||
}
|
||||
}
|
||||
err = os.Rename(path.Join(srcDir, item.Name()), tgtDir)
|
||||
if err != nil {
|
||||
return err
|
||||
if !item.IsDir() {
|
||||
continue
|
||||
}
|
||||
slog.Info(" -> package", "name", item.Name())
|
||||
tgtDir := path.Join(*root, "pkg_gen", item.Name())
|
||||
if fileExists(tgtDir) {
|
||||
delErr := os.RemoveAll(tgtDir)
|
||||
if delErr != nil {
|
||||
return delErr
|
||||
}
|
||||
}
|
||||
err = os.Rename(path.Join(srcDir, item.Name()), tgtDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if !b.PackagesOnly {
|
||||
|
|
@ -275,8 +290,8 @@ type templateData struct {
|
|||
Fields []string
|
||||
}
|
||||
|
||||
func fileExists(path string) bool {
|
||||
_, err := os.Stat(path)
|
||||
func fileExists(pathValue string) bool {
|
||||
_, err := os.Stat(pathValue)
|
||||
if os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
|
|
@ -312,10 +327,22 @@ func createBoilerplate(rootFolder, folder string) error {
|
|||
|
||||
resourceName := res.Name()
|
||||
|
||||
dsFile := path.Join(folder, svc.Name(), res.Name(), "datasources_gen", fmt.Sprintf("%s_data_source_gen.go", res.Name()))
|
||||
dsFile := path.Join(
|
||||
folder,
|
||||
svc.Name(),
|
||||
res.Name(),
|
||||
"datasources_gen",
|
||||
fmt.Sprintf("%s_data_source_gen.go", res.Name()),
|
||||
)
|
||||
handleDS = fileExists(dsFile)
|
||||
|
||||
resFile := path.Join(folder, svc.Name(), res.Name(), "resources_gen", fmt.Sprintf("%s_resource_gen.go", res.Name()))
|
||||
resFile := path.Join(
|
||||
folder,
|
||||
svc.Name(),
|
||||
res.Name(),
|
||||
"resources_gen",
|
||||
fmt.Sprintf("%s_resource_gen.go", res.Name()),
|
||||
)
|
||||
handleRes = fileExists(resFile)
|
||||
|
||||
dsGoFile := path.Join(folder, svc.Name(), res.Name(), "datasource.go")
|
||||
|
|
@ -407,7 +434,6 @@ func createBoilerplate(rootFolder, folder string) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -416,7 +442,7 @@ func createBoilerplate(rootFolder, folder string) error {
|
|||
}
|
||||
|
||||
func ucfirst(s string) string {
|
||||
if len(s) == 0 {
|
||||
if s == "" {
|
||||
return ""
|
||||
}
|
||||
return strings.ToUpper(s[:1]) + s[1:]
|
||||
|
|
@ -451,8 +477,8 @@ func writeTemplateToFile(tplName, tplFile, outFile string, data *templateData) e
|
|||
}
|
||||
|
||||
func generateServiceFiles(rootDir, generatorDir string) error {
|
||||
// slog.Info("Generating specs folder")
|
||||
err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0755)
|
||||
//nolint:gosec // this file is not sensitive, so we can use 0755
|
||||
err := os.MkdirAll(path.Join(rootDir, "generated", "specs"), 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -490,7 +516,6 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
|||
continue
|
||||
}
|
||||
|
||||
// slog.Info("Checking spec", "name", spec.Name())
|
||||
r := regexp.MustCompile(`^(.*)_config.yml$`)
|
||||
matches := r.FindAllStringSubmatch(specFile.Name(), -1)
|
||||
if matches != nil {
|
||||
|
|
@ -506,27 +531,44 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
|||
resource,
|
||||
)
|
||||
|
||||
oasFile := path.Join(generatorDir, "oas", fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()))
|
||||
oasFile := path.Join(
|
||||
generatorDir,
|
||||
"oas",
|
||||
fmt.Sprintf("%s%s.json", service.Name(), svcVersion.Name()),
|
||||
)
|
||||
if _, oasErr := os.Stat(oasFile); os.IsNotExist(oasErr) {
|
||||
slog.Warn(" could not find matching oas", "svc", service.Name(), "version", svcVersion.Name())
|
||||
slog.Warn(
|
||||
" could not find matching oas",
|
||||
"svc",
|
||||
service.Name(),
|
||||
"version",
|
||||
svcVersion.Name(),
|
||||
)
|
||||
continue
|
||||
}
|
||||
|
||||
scName := fmt.Sprintf("%s%s", service.Name(), svcVersion.Name())
|
||||
scName = strings.ReplaceAll(scName, "-", "")
|
||||
err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0755)
|
||||
//nolint:gosec // this file is not sensitive, so we can use 0755
|
||||
err = os.MkdirAll(path.Join(rootDir, "generated", "internal", "services", scName, resource), 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// slog.Info("Generating openapi spec json")
|
||||
specJsonFile := path.Join(rootDir, "generated", "specs", fmt.Sprintf("%s_%s_spec.json", scName, resource))
|
||||
specJsonFile := path.Join(
|
||||
rootDir,
|
||||
"generated",
|
||||
"specs",
|
||||
fmt.Sprintf("%s_%s_spec.json", scName, resource),
|
||||
)
|
||||
|
||||
var stdOut, stdErr bytes.Buffer
|
||||
|
||||
// noqa:gosec
|
||||
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||
cmd := exec.Command(
|
||||
"tfplugingen-openapi",
|
||||
"go",
|
||||
"run",
|
||||
"github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi",
|
||||
"generate",
|
||||
"--config",
|
||||
path.Join(rootDir, "service_specs", service.Name(), svcVersion.Name(), fileName),
|
||||
|
|
@ -553,11 +595,29 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
|||
if err = cmd.Wait(); err != nil {
|
||||
var exitErr *exec.ExitError
|
||||
if errors.As(err, &exitErr) {
|
||||
slog.Error("tfplugingen-openapi generate", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"tfplugingen-openapi generate",
|
||||
"code",
|
||||
exitErr.ExitCode(),
|
||||
"error",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return fmt.Errorf("%s", stdErr.String())
|
||||
}
|
||||
if err != nil {
|
||||
slog.Error("tfplugingen-openapi generate", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"tfplugingen-openapi generate",
|
||||
"err",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -565,18 +625,26 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
|||
slog.Warn(" command output", "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
}
|
||||
|
||||
// slog.Info("Creating terraform svc resource files folder")
|
||||
tgtFolder := path.Join(rootDir, "generated", "internal", "services", scName, resource, "resources_gen")
|
||||
err = os.MkdirAll(tgtFolder, 0755)
|
||||
tgtFolder := path.Join(
|
||||
rootDir,
|
||||
"generated",
|
||||
"internal",
|
||||
"services",
|
||||
scName,
|
||||
resource,
|
||||
"resources_gen",
|
||||
)
|
||||
//nolint:gosec // this file is not sensitive, so we can use 0755
|
||||
err = os.MkdirAll(tgtFolder, 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// slog.Info("Generating terraform svc resource files")
|
||||
|
||||
// noqa:gosec
|
||||
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||
cmd2 := exec.Command(
|
||||
"tfplugingen-framework",
|
||||
"go",
|
||||
"run",
|
||||
"github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework",
|
||||
"generate",
|
||||
"resources",
|
||||
"--input",
|
||||
|
|
@ -597,27 +665,53 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
|||
if err = cmd2.Wait(); err != nil {
|
||||
var exitErr *exec.ExitError
|
||||
if errors.As(err, &exitErr) {
|
||||
slog.Error("tfplugingen-framework generate resources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"tfplugingen-framework generate resources",
|
||||
"code",
|
||||
exitErr.ExitCode(),
|
||||
"error",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return fmt.Errorf("%s", stdErr.String())
|
||||
}
|
||||
if err != nil {
|
||||
slog.Error("tfplugingen-framework generate resources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"tfplugingen-framework generate resources",
|
||||
"err",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// slog.Info("Creating terraform svc datasource files folder")
|
||||
tgtFolder = path.Join(rootDir, "generated", "internal", "services", scName, resource, "datasources_gen")
|
||||
err = os.MkdirAll(tgtFolder, 0755)
|
||||
tgtFolder = path.Join(
|
||||
rootDir,
|
||||
"generated",
|
||||
"internal",
|
||||
"services",
|
||||
scName,
|
||||
resource,
|
||||
"datasources_gen",
|
||||
)
|
||||
//nolint:gosec // this directory is not sensitive, so we can use 0755
|
||||
err = os.MkdirAll(tgtFolder, 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// slog.Info("Generating terraform svc resource files")
|
||||
|
||||
// noqa:gosec
|
||||
// nolint:gosec // #nosec this command is not using any untrusted input, so we can ignore gosec warning
|
||||
cmd3 := exec.Command(
|
||||
"tfplugingen-framework",
|
||||
"go",
|
||||
"run",
|
||||
"github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework",
|
||||
"generate",
|
||||
"data-sources",
|
||||
"--input",
|
||||
|
|
@ -639,11 +733,29 @@ func generateServiceFiles(rootDir, generatorDir string) error {
|
|||
if err = cmd3.Wait(); err != nil {
|
||||
var exitErr *exec.ExitError
|
||||
if errors.As(err, &exitErr) {
|
||||
slog.Error("tfplugingen-framework generate data-sources", "code", exitErr.ExitCode(), "error", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"tfplugingen-framework generate data-sources",
|
||||
"code",
|
||||
exitErr.ExitCode(),
|
||||
"error",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return fmt.Errorf("%s", stdErr.String())
|
||||
}
|
||||
if err != nil {
|
||||
slog.Error("tfplugingen-framework generate data-sources", "err", err, "stdout", stdOut.String(), "stderr", stdErr.String())
|
||||
slog.Error(
|
||||
"tfplugingen-framework generate data-sources",
|
||||
"err",
|
||||
err,
|
||||
"stdout",
|
||||
stdOut.String(),
|
||||
"stderr",
|
||||
stdErr.String(),
|
||||
)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -674,10 +786,10 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
root, err := getRoot()
|
||||
if err != nil {
|
||||
//nolint:gocritic // in this case, we want to log the error and exit, as we cannot proceed without the root directory
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
|
|
@ -685,7 +797,6 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tmp.Close()
|
||||
|
||||
sc := bufio.NewScanner(f)
|
||||
for sc.Scan() {
|
||||
|
|
@ -709,6 +820,7 @@ func handleTfTagForDatasourceFile(filePath, service, resource string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
//nolint:gosec // path traversal is not a concern here
|
||||
if err := os.Rename(tmp.Name(), filePath); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
|
@ -773,13 +885,23 @@ func copyFile(src, dst string) (int64, error) {
|
|||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer source.Close()
|
||||
defer func(source *os.File) {
|
||||
err := source.Close()
|
||||
if err != nil {
|
||||
slog.Error("copyFile", "err", err)
|
||||
}
|
||||
}(source)
|
||||
|
||||
destination, err := os.Create(dst)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer destination.Close()
|
||||
defer func(destination *os.File) {
|
||||
err := destination.Close()
|
||||
if err != nil {
|
||||
slog.Error("copyFile", "err", err)
|
||||
}
|
||||
}(destination)
|
||||
nBytes, err := io.Copy(destination, source)
|
||||
return nBytes, err
|
||||
}
|
||||
|
|
@ -790,10 +912,8 @@ func getOnlyLatest(m map[string]version) (map[string]version, error) {
|
|||
item, ok := tmpMap[k]
|
||||
if !ok {
|
||||
tmpMap[k] = v
|
||||
} else {
|
||||
if item.major == v.major && item.minor < v.minor {
|
||||
tmpMap[k] = v
|
||||
}
|
||||
} else if item.major == v.major && item.minor < v.minor {
|
||||
tmpMap[k] = v
|
||||
}
|
||||
}
|
||||
return tmpMap, nil
|
||||
|
|
@ -807,18 +927,19 @@ func getVersions(dir string) (map[string]version, error) {
|
|||
}
|
||||
|
||||
for _, entry := range children {
|
||||
if entry.IsDir() {
|
||||
versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m, err2 := extractVersions(entry.Name(), versions)
|
||||
if err2 != nil {
|
||||
return m, err2
|
||||
}
|
||||
for k, v := range m {
|
||||
res[k] = v
|
||||
}
|
||||
if !entry.IsDir() {
|
||||
continue
|
||||
}
|
||||
versions, err := os.ReadDir(path.Join(dir, "services", entry.Name()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m, err2 := extractVersions(entry.Name(), versions)
|
||||
if err2 != nil {
|
||||
return m, err2
|
||||
}
|
||||
for k, v := range m {
|
||||
res[k] = v
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
|
|
@ -827,20 +948,21 @@ func getVersions(dir string) (map[string]version, error) {
|
|||
func extractVersions(service string, versionDirs []os.DirEntry) (map[string]version, error) {
|
||||
res := make(map[string]version)
|
||||
for _, vDir := range versionDirs {
|
||||
if vDir.IsDir() {
|
||||
r := regexp.MustCompile(`v([0-9]+)([a-z]+)([0-9]*)`)
|
||||
matches := r.FindAllStringSubmatch(vDir.Name(), -1)
|
||||
if matches == nil {
|
||||
continue
|
||||
}
|
||||
svc, ver, err := handleVersion(service, matches[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !vDir.IsDir() {
|
||||
continue
|
||||
}
|
||||
r := regexp.MustCompile(`v(\d+)([a-z]+)(\d*)`)
|
||||
matches := r.FindAllStringSubmatch(vDir.Name(), -1)
|
||||
if matches == nil {
|
||||
continue
|
||||
}
|
||||
svc, ver, err := handleVersion(service, matches[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if svc != nil && ver != nil {
|
||||
res[*svc] = *ver
|
||||
}
|
||||
if svc != nil && ver != nil {
|
||||
res[*svc] = *ver
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
|
|
@ -927,30 +1049,25 @@ func getTokens(fileName string) ([]string, error) {
|
|||
return nil, err
|
||||
}
|
||||
|
||||
ast.Inspect(node, func(n ast.Node) bool {
|
||||
// Suche nach Typ-Deklarationen (structs)
|
||||
ts, ok := n.(*ast.TypeSpec)
|
||||
if ok {
|
||||
if strings.Contains(ts.Name.Name, "Model") {
|
||||
// fmt.Printf("found model: %s\n", ts.Name.Name)
|
||||
ast.Inspect(ts, func(sn ast.Node) bool {
|
||||
tts, tok := sn.(*ast.Field)
|
||||
if tok {
|
||||
// fmt.Printf(" found: %+v\n", tts.Names[0])
|
||||
// spew.Dump(tts.Type)
|
||||
|
||||
result = append(result, tts.Names[0].String())
|
||||
|
||||
// fld, fldOk := tts.Type.(*ast.Ident)
|
||||
//if fldOk {
|
||||
// fmt.Printf("type: %+v\n", fld)
|
||||
//}
|
||||
}
|
||||
return true
|
||||
})
|
||||
ast.Inspect(
|
||||
node, func(n ast.Node) bool {
|
||||
// Suche nach Typ-Deklarationen (structs)
|
||||
ts, ok := n.(*ast.TypeSpec)
|
||||
if ok {
|
||||
if strings.Contains(ts.Name.Name, "Model") {
|
||||
ast.Inspect(
|
||||
ts, func(sn ast.Node) bool {
|
||||
tts, tok := sn.(*ast.Field)
|
||||
if tok {
|
||||
result = append(result, tts.Names[0].String())
|
||||
}
|
||||
return true
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return true
|
||||
},
|
||||
)
|
||||
return result, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ package build
|
|||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"syscall"
|
||||
|
|
@ -74,14 +75,24 @@ func Copy(srcFile, dstFile string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
defer out.Close()
|
||||
defer func(out *os.File) {
|
||||
err := out.Close()
|
||||
if err != nil {
|
||||
slog.Error("failed to close file", slog.Any("err", err))
|
||||
}
|
||||
}(out)
|
||||
|
||||
in, err := os.Open(srcFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer in.Close()
|
||||
defer func(in *os.File) {
|
||||
err := in.Close()
|
||||
if err != nil {
|
||||
slog.Error("error closing destination file", slog.Any("err", err))
|
||||
}
|
||||
}(in)
|
||||
|
||||
_, err = io.Copy(out, in)
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ var buildCmd = &cobra.Command{
|
|||
Use: "build",
|
||||
Short: "Build the necessary boilerplate",
|
||||
Long: `...`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
b := build.Builder{
|
||||
SkipClone: skipClone,
|
||||
SkipCleanup: skipCleanup,
|
||||
|
|
@ -30,7 +30,7 @@ func NewBuildCmd() *cobra.Command {
|
|||
return buildCmd
|
||||
}
|
||||
|
||||
func init() { // nolint: gochecknoinits
|
||||
func init() { //nolint:gochecknoinits // This is the standard way to set up Cobra commands
|
||||
buildCmd.Flags().BoolVarP(&skipCleanup, "skip-clean", "c", false, "Skip cleanup steps")
|
||||
buildCmd.Flags().BoolVarP(&skipClone, "skip-clone", "g", false, "Skip cloning from git")
|
||||
buildCmd.Flags().BoolVarP(&packagesOnly, "packages-only", "p", false, "Only generate packages")
|
||||
|
|
|
|||
|
|
@ -12,16 +12,15 @@ var examplesCmd = &cobra.Command{
|
|||
Use: "examples",
|
||||
Short: "create examples",
|
||||
Long: `...`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
|
||||
//filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
// filePathStr := "stackit/internal/services/postgresflexalpha/database/datasources_gen/database_data_source_gen.go"
|
||||
//
|
||||
//src, err := os.ReadFile(filePathStr)
|
||||
//if err != nil {
|
||||
// src, err := os.ReadFile(filePathStr)
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//
|
||||
//i := interp.New(
|
||||
// i := interp.New(
|
||||
// interp.Options{
|
||||
// GoPath: "/home/henselinm/.asdf/installs/golang/1.25.6/packages",
|
||||
// BuildTags: nil,
|
||||
|
|
@ -34,46 +33,46 @@ var examplesCmd = &cobra.Command{
|
|||
// Unrestricted: false,
|
||||
// },
|
||||
//)
|
||||
//err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators"))
|
||||
//if err != nil {
|
||||
// err = i.Use(i.Symbols("github.com/hashicorp/terraform-plugin-framework-validators"))
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//err = i.Use(stdlib.Symbols)
|
||||
//if err != nil {
|
||||
// err = i.Use(stdlib.Symbols)
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//_, err = i.Eval(string(src))
|
||||
//if err != nil {
|
||||
// _, err = i.Eval(string(src))
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//
|
||||
//v, err := i.Eval("DatabaseDataSourceSchema")
|
||||
//if err != nil {
|
||||
// v, err := i.Eval("DatabaseDataSourceSchema")
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//
|
||||
//bar := v.Interface().(func(string) string)
|
||||
// bar := v.Interface().(func(string) string)
|
||||
//
|
||||
//r := bar("Kung")
|
||||
//println(r)
|
||||
// r := bar("Kung")
|
||||
// println(r)
|
||||
//
|
||||
//evalPath, err := i.EvalPath(filePathStr)
|
||||
//if err != nil {
|
||||
// evalPath, err := i.EvalPath(filePathStr)
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//
|
||||
//fmt.Printf("%+v\n", evalPath)
|
||||
// fmt.Printf("%+v\n", evalPath)
|
||||
|
||||
//_, err = i.Eval(`import "fmt"`)
|
||||
//if err != nil {
|
||||
// _, err = i.Eval(`import "fmt"`)
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//_, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`)
|
||||
//if err != nil {
|
||||
// _, err = i.Eval(`func Hallo() { fmt.Println("Hi!") }`)
|
||||
// if err != nil {
|
||||
// return err
|
||||
//}
|
||||
|
||||
//v = i.Symbols("Hallo")
|
||||
// v = i.Symbols("Hallo")
|
||||
|
||||
// fmt.Println(v)
|
||||
return workServices()
|
||||
|
|
@ -110,6 +109,6 @@ func NewExamplesCmd() *cobra.Command {
|
|||
return examplesCmd
|
||||
}
|
||||
|
||||
//func init() { // nolint: gochecknoinits
|
||||
// func init() { // nolint: gochecknoinits
|
||||
// examplesCmd.Flags().BoolVarP(&example, "example", "e", false, "example")
|
||||
//}
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ var getFieldsCmd = &cobra.Command{
|
|||
Use: "get-fields",
|
||||
Short: "get fields from file",
|
||||
Long: `...`,
|
||||
PreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
PreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
typeStr := "data_source"
|
||||
if resType != "resource" && resType != "datasource" {
|
||||
return fmt.Errorf("--type can only be resource or datasource")
|
||||
|
|
@ -76,13 +76,13 @@ var getFieldsCmd = &cobra.Command{
|
|||
|
||||
//// Enum check
|
||||
// switch format {
|
||||
//case "json", "yaml":
|
||||
// case "json", "yaml":
|
||||
//default:
|
||||
// return fmt.Errorf("invalid --format: %s (want json|yaml)", format)
|
||||
//}
|
||||
return nil
|
||||
},
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return getFields(filePath)
|
||||
},
|
||||
}
|
||||
|
|
@ -107,31 +107,26 @@ func getTokens(fileName string) ([]string, error) {
|
|||
return nil, err
|
||||
}
|
||||
|
||||
ast.Inspect(node, func(n ast.Node) bool {
|
||||
// Suche nach Typ-Deklarationen (structs)
|
||||
ts, ok := n.(*ast.TypeSpec)
|
||||
if ok {
|
||||
if strings.Contains(ts.Name.Name, "Model") {
|
||||
// fmt.Printf("found model: %s\n", ts.Name.Name)
|
||||
ast.Inspect(ts, func(sn ast.Node) bool {
|
||||
tts, tok := sn.(*ast.Field)
|
||||
if tok {
|
||||
// fmt.Printf(" found: %+v\n", tts.Names[0])
|
||||
// spew.Dump(tts.Type)
|
||||
|
||||
result = append(result, tts.Names[0].String())
|
||||
|
||||
// fld, fldOk := tts.Type.(*ast.Ident)
|
||||
//if fldOk {
|
||||
// fmt.Printf("type: %+v\n", fld)
|
||||
//}
|
||||
}
|
||||
return true
|
||||
})
|
||||
ast.Inspect(
|
||||
node, func(n ast.Node) bool {
|
||||
// Suche nach Typ-Deklarationen (structs)
|
||||
ts, ok := n.(*ast.TypeSpec)
|
||||
if ok {
|
||||
if strings.Contains(ts.Name.Name, "Model") {
|
||||
ast.Inspect(
|
||||
ts, func(sn ast.Node) bool {
|
||||
tts, tok := sn.(*ast.Field)
|
||||
if tok {
|
||||
result = append(result, tts.Names[0].String())
|
||||
}
|
||||
return true
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return true
|
||||
},
|
||||
)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
|
|
@ -139,9 +134,15 @@ func NewGetFieldsCmd() *cobra.Command {
|
|||
return getFieldsCmd
|
||||
}
|
||||
|
||||
func init() { // nolint: gochecknoinits
|
||||
func init() { //nolint:gochecknoinits //this is the only way to add the command to the rootCmd
|
||||
getFieldsCmd.Flags().StringVarP(&inFile, "infile", "i", "", "input filename incl path")
|
||||
getFieldsCmd.Flags().StringVarP(&svcName, "service", "s", "", "service name")
|
||||
getFieldsCmd.Flags().StringVarP(&resName, "resource", "r", "", "resource name")
|
||||
getFieldsCmd.Flags().StringVarP(&resType, "type", "t", "resource", "resource type (data-source or resource [default])")
|
||||
getFieldsCmd.Flags().StringVarP(
|
||||
&resType,
|
||||
"type",
|
||||
"t",
|
||||
"resource",
|
||||
"resource type (data-source or resource [default])",
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,36 +35,27 @@ type GpgPublicKey struct {
|
|||
}
|
||||
|
||||
func (p *Provider) CreateArchitectureFiles() error {
|
||||
// var namespace, provider, distPath, repoName, version, gpgFingerprint, gpgPubKeyFile, domain string
|
||||
|
||||
log.Println("* Creating architecture files in target directories")
|
||||
|
||||
// filename = terraform-provider-[provider]_0.0.1_darwin_amd64.zip - provider_name + version + target + architecture + .zip
|
||||
// prefix := fmt.Sprintf("v1/providers/%s/%s/%s/", namespace, provider, version)
|
||||
prefix := path.Join("v1", "providers", p.Namespace, p.Provider, p.Version)
|
||||
|
||||
// pathPrefix := fmt.Sprintf("release/%s", prefix)
|
||||
pathPrefix := path.Join("release", prefix)
|
||||
|
||||
// urlPrefix := fmt.Sprintf("https://%s/%s", domain, prefix)
|
||||
urlPrefix, err := url.JoinPath("https://", p.Domain, prefix)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error creating base url: %w", err)
|
||||
}
|
||||
|
||||
// download url = https://example.com/v1/providers/namespace/provider/0.0.1/download/terraform-provider_0.0.1_darwin_amd64.zip
|
||||
downloadUrlPrefix, err := url.JoinPath(urlPrefix, "download")
|
||||
if err != nil {
|
||||
return fmt.Errorf("error crearting download url: %w", err)
|
||||
}
|
||||
downloadPathPrefix := path.Join(pathPrefix, "download")
|
||||
|
||||
// shasums url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS
|
||||
shasumsUrl, err := url.JoinPath(urlPrefix, fmt.Sprintf("%s_%s_SHA256SUMS", p.RepoName, p.Version))
|
||||
if err != nil {
|
||||
return fmt.Errorf("error creating shasums url: %w", err)
|
||||
}
|
||||
// shasums_signature_url = https://example.com/v1/providers/namespace/provider/0.0.1/terraform-provider_0.0.1_SHA256SUMS.sig
|
||||
shasumsSigUrl := shasumsUrl + ".sig"
|
||||
|
||||
gpgAsciiPub, err := p.ReadGpgFile()
|
||||
|
|
@ -116,33 +107,6 @@ func (p *Provider) CreateArchitectureFiles() error {
|
|||
},
|
||||
},
|
||||
}
|
||||
// var architectureTemplate = []byte(fmt.Sprintf(`
|
||||
//{
|
||||
// "protocols": [
|
||||
// "4.0",
|
||||
// "5.1",
|
||||
// "6.0"
|
||||
// ],
|
||||
// "os": "%s",
|
||||
// "arch": "%s",
|
||||
// "filename": "%s",
|
||||
// "download_url": "%s",
|
||||
// "shasums_url": "%s",
|
||||
// "shasums_signature_url": "%s",
|
||||
// "shasum": "%s",
|
||||
// "signing_keys": {
|
||||
// "gpg_public_keys": [
|
||||
// {
|
||||
// "key_id": "%s",
|
||||
// "ascii_armor": "%s",
|
||||
// "trust_signature": "",
|
||||
// "source": "",
|
||||
// "source_url": ""
|
||||
// }
|
||||
// ]
|
||||
// }
|
||||
//}
|
||||
// `, target, arch, fileName, downloadUrl, shasumsUrl, shasumsSigUrl, shasum, gpgFingerprint, gpgAsciiPub))
|
||||
|
||||
log.Printf(" - Arch file: %s", archFileName)
|
||||
|
||||
|
|
@ -160,8 +124,12 @@ func WriteArchitectureFile(filePath string, arch Architecture) error {
|
|||
if err != nil {
|
||||
return fmt.Errorf("error encoding data: %w", err)
|
||||
}
|
||||
|
||||
err = os.WriteFile(filePath, jsonString, os.ModePerm)
|
||||
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm
|
||||
err = os.WriteFile(
|
||||
filePath,
|
||||
jsonString,
|
||||
os.ModePerm,
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error writing data: %w", err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -161,10 +161,12 @@ func (p *Provider) createVersionsFile() error {
|
|||
target := fileNameSplit[2]
|
||||
arch := fileNameSplit[3]
|
||||
|
||||
version.Platforms = append(version.Platforms, Platform{
|
||||
OS: target,
|
||||
Arch: arch,
|
||||
})
|
||||
version.Platforms = append(
|
||||
version.Platforms, Platform{
|
||||
OS: target,
|
||||
Arch: arch,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
data := Data{}
|
||||
|
|
@ -206,16 +208,19 @@ func (p *Provider) CreateWellKnown() error {
|
|||
log.Println("* Creating .well-known directory")
|
||||
pathString := path.Join(p.RootPath, "release", ".well-known")
|
||||
|
||||
//nolint:gosec // this file is not sensitive, so we can use ModePerm
|
||||
err := os.MkdirAll(pathString, os.ModePerm)
|
||||
if err != nil && !errors.Is(err, fs.ErrExist) {
|
||||
return fmt.Errorf("error creating '%s' dir: %w", pathString, err)
|
||||
}
|
||||
|
||||
log.Println(" - Writing to .well-known/terraform.json file")
|
||||
|
||||
//nolint:gosec // this file is not sensitive, so we can use 0644
|
||||
err = os.WriteFile(
|
||||
fmt.Sprintf("%s/terraform.json", pathString),
|
||||
[]byte(`{"providers.v1": "/v1/providers/"}`),
|
||||
0644,
|
||||
0o644,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -224,9 +229,10 @@ func (p *Provider) CreateWellKnown() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func CreateDir(path string) error {
|
||||
log.Printf("* Creating %s directory", path)
|
||||
err := os.MkdirAll(path, os.ModePerm)
|
||||
func CreateDir(pathValue string) error {
|
||||
log.Printf("* Creating %s directory", pathValue)
|
||||
//nolint:gosec // this file is not sensitive, so we can use ModePerm
|
||||
err := os.MkdirAll(pathValue, os.ModePerm)
|
||||
if errors.Is(err, fs.ErrExist) {
|
||||
return nil
|
||||
}
|
||||
|
|
@ -269,13 +275,23 @@ func CopyFile(src, dst string) (int64, error) {
|
|||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer source.Close()
|
||||
defer func(source *os.File) {
|
||||
err := source.Close()
|
||||
if err != nil {
|
||||
slog.Error("error closing source file", slog.Any("err", err))
|
||||
}
|
||||
}(source)
|
||||
|
||||
destination, err := os.Create(dst)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer destination.Close()
|
||||
defer func(destination *os.File) {
|
||||
err := destination.Close()
|
||||
if err != nil {
|
||||
slog.Error("error closing destination file", slog.Any("err", err))
|
||||
}
|
||||
}(destination)
|
||||
nBytes, err := io.Copy(destination, source)
|
||||
return nBytes, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,7 +35,12 @@ func (d *Data) WriteToFile(filePath string) error {
|
|||
return fmt.Errorf("error encoding data: %w", err)
|
||||
}
|
||||
|
||||
err = os.WriteFile(filePath, jsonString, os.ModePerm)
|
||||
//nolint:gosec // this file is not sensitive, so we can use os.ModePerm
|
||||
err = os.WriteFile(
|
||||
filePath,
|
||||
jsonString,
|
||||
os.ModePerm,
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error writing data: %w", err)
|
||||
}
|
||||
|
|
@ -86,7 +91,13 @@ func (d *Data) LoadFromUrl(uri string) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer os.Remove(file.Name()) // Clean up
|
||||
defer func(name string) {
|
||||
//nolint:gosec // The file path is generated by os.CreateTemp and is not user-controllable
|
||||
err := os.Remove(name)
|
||||
if err != nil {
|
||||
slog.Error("failed to remove temporary file", slog.Any("err", err))
|
||||
}
|
||||
}(file.Name()) // Clean up
|
||||
|
||||
err = DownloadFile(
|
||||
u.String(),
|
||||
|
|
@ -123,20 +134,30 @@ func (v *Version) AddProtocol(p string) error {
|
|||
// DownloadFile will download a url and store it in local filepath.
|
||||
// It writes to the destination file as it downloads it, without
|
||||
// loading the entire file into memory.
|
||||
func DownloadFile(url string, filepath string) error {
|
||||
func DownloadFile(urlValue, filepath string) error {
|
||||
// Create the file
|
||||
//nolint:gosec // path traversal is not a concern here, as the filepath is generated by us and not user input
|
||||
out, err := os.Create(filepath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
defer func(out *os.File) {
|
||||
err := out.Close()
|
||||
if err != nil {
|
||||
slog.Error("failed to close file", slog.Any("err", err))
|
||||
}
|
||||
}(out)
|
||||
|
||||
// Get the data
|
||||
resp, err := http.Get(url)
|
||||
|
||||
//nolint:gosec,bodyclose // this is a controlled URL, not user input
|
||||
resp, err := http.Get(urlValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
defer func(Body io.ReadCloser) {
|
||||
_ = Body.Close()
|
||||
}(resp.Body)
|
||||
|
||||
// Write the body to file
|
||||
_, err = io.Copy(out, resp.Body)
|
||||
|
|
|
|||
|
|
@ -29,20 +29,32 @@ var publishCmd = &cobra.Command{
|
|||
Use: "publish",
|
||||
Short: "Publish terraform provider",
|
||||
Long: `...`,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return publish()
|
||||
},
|
||||
}
|
||||
|
||||
func init() { // nolint: gochecknoinits
|
||||
func init() { //nolint:gochecknoinits //this is the standard way to set up cobra commands
|
||||
publishCmd.Flags().StringVarP(&namespace, "namespace", "n", "", "Namespace for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&domain, "domain", "d", "", "Domain for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&providerName, "providerName", "p", "", "ProviderName for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&distPath, "distPath", "x", "dist", "Dist Path for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&repoName, "repoName", "r", "", "RepoName for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&version, "version", "v", "", "Version for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&gpgFingerprint, "gpgFingerprint", "f", "", "GPG Fingerprint for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(&gpgPubKeyFile, "gpgPubKeyFile", "k", "", "GPG PubKey file name for the Terraform registry.")
|
||||
publishCmd.Flags().StringVarP(
|
||||
&gpgFingerprint,
|
||||
"gpgFingerprint",
|
||||
"f",
|
||||
"",
|
||||
"GPG Fingerprint for the Terraform registry.",
|
||||
)
|
||||
publishCmd.Flags().StringVarP(
|
||||
&gpgPubKeyFile,
|
||||
"gpgPubKeyFile",
|
||||
"k",
|
||||
"",
|
||||
"GPG PubKey file name for the Terraform registry.",
|
||||
)
|
||||
|
||||
err := publishCmd.MarkFlagRequired("namespace")
|
||||
if err != nil {
|
||||
|
|
@ -105,6 +117,7 @@ func publish() error {
|
|||
|
||||
// Create release dir - only the contents of this need to be uploaded to S3
|
||||
log.Printf("* Creating release directory")
|
||||
//nolint:gosec // this directory is not sensitive, so we can use 0750
|
||||
err = os.MkdirAll(path.Join(p.RootPath, "release"), os.ModePerm)
|
||||
if err != nil && !errors.Is(err, fs.ErrExist) {
|
||||
return fmt.Errorf("error creating '%s' dir: %w", path.Join(p.RootPath, "release"), err)
|
||||
|
|
|
|||
|
|
@ -1,38 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_database Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_database (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
database_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `database_id` (Number) The ID of the database.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`database_id`\".",
|
||||
- `name` (String) The name of the database.
|
||||
- `owner` (String) The owner of the database.
|
||||
- `tf_original_api_id` (Number) The id of the database.
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_flavor Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_flavor (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_postgresflexalpha_flavor" "flavor" {
|
||||
project_id = var.project_id
|
||||
region = var.region
|
||||
cpu = 4
|
||||
ram = 16
|
||||
node_type = "Single"
|
||||
storage_class = "premium-perf2-stackit"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `cpu` (Number) The cpu count of the instance.
|
||||
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||
- `project_id` (String) The cpu count of the instance.
|
||||
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||
- `region` (String) The flavor description.
|
||||
- `storage_class` (String) The memory of the instance in Gibibyte.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `description` (String) The flavor description.
|
||||
- `flavor_id` (String) The flavor id of the instance flavor.
|
||||
- `id` (String) The terraform id of the instance flavor.
|
||||
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||
- `storage_classes` (Attributes List) (see [below for nested schema](#nestedatt--storage_classes))
|
||||
|
||||
<a id="nestedatt--storage_classes"></a>
|
||||
### Nested Schema for `storage_classes`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `class` (String)
|
||||
- `max_io_per_sec` (Number)
|
||||
- `max_through_in_mb` (Number)
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_flavors Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_flavors (Data Source)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Optional
|
||||
|
||||
- `page` (Number) Number of the page of items list to be returned.
|
||||
- `size` (Number) Number of items to be returned on each page.
|
||||
- `sort` (String) Sorting of the flavors to be returned on each page.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `flavors` (Attributes List) List of flavors available for the project. (see [below for nested schema](#nestedatt--flavors))
|
||||
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||
|
||||
<a id="nestedatt--flavors"></a>
|
||||
### Nested Schema for `flavors`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `cpu` (Number) The cpu count of the instance.
|
||||
- `description` (String) The flavor description.
|
||||
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||
- `memory` (Number) The memory of the instance in Gibibyte.
|
||||
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||
- `node_type` (String) defines the nodeType it can be either single or replica
|
||||
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--flavors--storage_classes))
|
||||
- `tf_original_api_id` (String) The id of the instance flavor.
|
||||
|
||||
<a id="nestedatt--flavors--storage_classes"></a>
|
||||
### Nested Schema for `flavors.storage_classes`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `class` (String)
|
||||
- `max_io_per_sec` (Number)
|
||||
- `max_through_in_mb` (Number)
|
||||
|
||||
|
||||
|
||||
<a id="nestedatt--pagination"></a>
|
||||
### Nested Schema for `pagination`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `page` (Number)
|
||||
- `size` (Number)
|
||||
- `sort` (String)
|
||||
- `total_pages` (Number)
|
||||
- `total_rows` (Number)
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_instance Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_instance (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_postgresflexalpha_instance" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||
- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
|
||||
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||
|
||||
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||
- `name` (String) The name of the instance.
|
||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||
- `replicas` (Number) How many replicas the instance should have.
|
||||
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||
- `status` (String) The current status of the instance.
|
||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||
- `tf_original_api_id` (String) The ID of the instance.
|
||||
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||
|
||||
<a id="nestedatt--connection_info"></a>
|
||||
### Nested Schema for `connection_info`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `host` (String) The host of the instance.
|
||||
- `port` (Number) The port of the instance.
|
||||
|
||||
|
||||
<a id="nestedatt--encryption"></a>
|
||||
### Nested Schema for `encryption`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `kek_key_id` (String) The encryption-key key identifier
|
||||
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||
- `kek_key_version` (String) The encryption-key version
|
||||
- `service_account` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--network"></a>
|
||||
### Nested Schema for `network`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
- `instance_address` (String)
|
||||
- `router_address` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--storage"></a>
|
||||
### Nested Schema for `storage`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `performance_class` (String) The storage class for the storage.
|
||||
- `size` (Number) The storage size in Gigabytes.
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_user Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_user (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
- `user_id` (Number) The ID of the user.
|
||||
|
||||
### Optional
|
||||
|
||||
- `id` (String) Terraform's internal resource ID. It is structured as \"`project_id`,`region`,`instance_id`,`user_id`\".",
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `name` (String) The name of the user.
|
||||
- `roles` (List of String) A list of user roles.
|
||||
- `status` (String) The current status of the user.
|
||||
- `tf_original_api_id` (Number) The ID of the user.
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_database Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_database (Data Source)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `database_name` (String) The name of the database.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||
- `id` (String) The terraform internal identifier.
|
||||
- `name` (String) The name of the database.
|
||||
- `owner` (String) The owner of the database.
|
||||
- `tf_original_api_id` (Number) The id of the database.
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_flavor Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_flavor (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_sqlserverflexalpha_flavor" "flavor" {
|
||||
project_id = var.project_id
|
||||
region = var.region
|
||||
cpu = 4
|
||||
ram = 16
|
||||
node_type = "Single"
|
||||
storage_class = "premium-perf2-stackit"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `cpu` (Number) The cpu count of the instance.
|
||||
- `node_type` (String) defines the nodeType it can be either single or HA
|
||||
- `project_id` (String) The project ID of the flavor.
|
||||
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||
- `region` (String) The region of the flavor.
|
||||
- `storage_class` (String) The memory of the instance in Gibibyte.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `description` (String) The flavor description.
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `id` (String) The id of the instance flavor.
|
||||
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes))
|
||||
|
||||
<a id="nestedatt--storage_classes"></a>
|
||||
### Nested Schema for `storage_classes`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `class` (String)
|
||||
- `max_io_per_sec` (Number)
|
||||
- `max_through_in_mb` (Number)
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_instance (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||
- `edition` (String) Edition of the MSSQL server instance
|
||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||
- `name` (String) The name of the instance.
|
||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||
- `replicas` (Number) How many replicas the instance should have.
|
||||
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||
- `status` (String)
|
||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||
- `tf_original_api_id` (String) The ID of the instance.
|
||||
- `version` (String) The sqlserver version used for the instance.
|
||||
|
||||
<a id="nestedatt--encryption"></a>
|
||||
### Nested Schema for `encryption`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `kek_key_id` (String) The key identifier
|
||||
- `kek_key_ring_id` (String) The keyring identifier
|
||||
- `kek_key_version` (String) The key version
|
||||
- `service_account` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--network"></a>
|
||||
### Nested Schema for `network`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `access_scope` (String) The network access scope of the instance
|
||||
|
||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
- `instance_address` (String)
|
||||
- `router_address` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--storage"></a>
|
||||
### Nested Schema for `storage`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `class` (String) The storage class for the storage.
|
||||
- `size` (Number) The storage size in Gigabytes.
|
||||
|
|
@ -1,62 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_user Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_user (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
user_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Optional
|
||||
|
||||
- `page` (Number) Number of the page of items list to be returned.
|
||||
- `size` (Number) Number of items to be returned on each page.
|
||||
- `sort` (String) Sorting of the users to be returned on each page.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||
|
||||
<a id="nestedatt--pagination"></a>
|
||||
### Nested Schema for `pagination`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `page` (Number)
|
||||
- `size` (Number)
|
||||
- `sort` (String)
|
||||
- `total_pages` (Number)
|
||||
- `total_rows` (Number)
|
||||
|
||||
|
||||
<a id="nestedatt--users"></a>
|
||||
### Nested Schema for `users`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `status` (String) The current status of the user.
|
||||
- `tf_original_api_id` (Number) The ID of the user.
|
||||
- `username` (String) The name of the user.
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_database Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_database (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_sqlserverflexbeta_database" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
database_name = "dbname"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `database_name` (String) The name of the database.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||
- `id` (String) The terraform internal identifier.
|
||||
- `name` (String) The name of the database.
|
||||
- `owner` (String) The owner of the database.
|
||||
- `tf_original_api_id` (Number) The id of the database.
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_flavor Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_flavor (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_sqlserverflexbeta_flavor" "flavor" {
|
||||
project_id = var.project_id
|
||||
region = var.region
|
||||
cpu = 4
|
||||
ram = 16
|
||||
node_type = "Single"
|
||||
storage_class = "premium-perf2-stackit"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `cpu` (Number) The cpu count of the instance.
|
||||
- `node_type` (String) defines the nodeType it can be either single or HA
|
||||
- `project_id` (String) The project ID of the flavor.
|
||||
- `ram` (Number) The memory of the instance in Gibibyte.
|
||||
- `region` (String) The region of the flavor.
|
||||
- `storage_class` (String) The memory of the instance in Gibibyte.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `description` (String) The flavor description.
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `id` (String) The id of the instance flavor.
|
||||
- `max_gb` (Number) maximum storage which can be ordered for the flavor in Gigabyte.
|
||||
- `min_gb` (Number) minimum storage which is required to order in Gigabyte.
|
||||
- `storage_classes` (Attributes List) maximum storage which can be ordered for the flavor in Gigabyte. (see [below for nested schema](#nestedatt--storage_classes))
|
||||
|
||||
<a id="nestedatt--storage_classes"></a>
|
||||
### Nested Schema for `storage_classes`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `class` (String)
|
||||
- `max_io_per_sec` (Number)
|
||||
- `max_through_in_mb` (Number)
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_instance (Data Source)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
data "stackitprivatepreview_sqlserverflexbeta_instance" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||
- `edition` (String) Edition of the MSSQL server instance
|
||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||
- `name` (String) The name of the instance.
|
||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||
- `replicas` (Number) How many replicas the instance should have.
|
||||
- `retention_days` (Number) The days for how long the backup files should be stored before cleaned up. 30 to 365
|
||||
- `status` (String)
|
||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||
- `tf_original_api_id` (String) The ID of the instance.
|
||||
- `version` (String) The sqlserver version used for the instance.
|
||||
|
||||
<a id="nestedatt--encryption"></a>
|
||||
### Nested Schema for `encryption`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `kek_key_id` (String) The key identifier
|
||||
- `kek_key_ring_id` (String) The keyring identifier
|
||||
- `kek_key_version` (String) The key version
|
||||
- `service_account` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--network"></a>
|
||||
### Nested Schema for `network`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `access_scope` (String) The network access scope of the instance
|
||||
|
||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
- `instance_address` (String)
|
||||
- `router_address` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--storage"></a>
|
||||
### Nested Schema for `storage`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `class` (String) The storage class for the storage.
|
||||
- `size` (Number) The storage size in Gigabytes.
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_user Data Source - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_user (Data Source)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Optional
|
||||
|
||||
- `page` (Number) Number of the page of items list to be returned.
|
||||
- `size` (Number) Number of items to be returned on each page.
|
||||
- `sort` (String) Sorting of the users to be returned on each page.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `pagination` (Attributes) (see [below for nested schema](#nestedatt--pagination))
|
||||
- `users` (Attributes List) List of all users inside an instance (see [below for nested schema](#nestedatt--users))
|
||||
|
||||
<a id="nestedatt--pagination"></a>
|
||||
### Nested Schema for `pagination`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `page` (Number)
|
||||
- `size` (Number)
|
||||
- `sort` (String)
|
||||
- `total_pages` (Number)
|
||||
- `total_rows` (Number)
|
||||
|
||||
|
||||
<a id="nestedatt--users"></a>
|
||||
### Nested Schema for `users`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `status` (String) The current status of the user.
|
||||
- `tf_original_api_id` (Number) The ID of the user.
|
||||
- `username` (String) The name of the user.
|
||||
|
|
@ -1,83 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview Provider"
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview Provider
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
provider "stackitprivatepreview" {
|
||||
default_region = "eu01"
|
||||
}
|
||||
|
||||
provider "stackitprivatepreview" {
|
||||
default_region = "eu01"
|
||||
service_account_key_path = "service_account.json"
|
||||
}
|
||||
|
||||
# Authentication
|
||||
|
||||
# Key flow
|
||||
provider "stackitprivatepreview" {
|
||||
default_region = "eu01"
|
||||
service_account_key = var.service_account_key
|
||||
private_key = var.private_key
|
||||
}
|
||||
|
||||
# Key flow (using path)
|
||||
provider "stackitprivatepreview" {
|
||||
default_region = "eu01"
|
||||
service_account_key_path = var.service_account_key_path
|
||||
private_key_path = var.private_key_path
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Optional
|
||||
|
||||
- `authorization_custom_endpoint` (String) Custom endpoint for the Membership service
|
||||
- `cdn_custom_endpoint` (String) Custom endpoint for the CDN service
|
||||
- `credentials_path` (String) Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.
|
||||
- `default_region` (String) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||
- `dns_custom_endpoint` (String) Custom endpoint for the DNS service
|
||||
- `enable_beta_resources` (Boolean) Enable beta resources. Default is false.
|
||||
- `experiments` (List of String) Enables experiments. These are unstable features without official support. More information can be found in the README. Available Experiments: iam, routing-tables, network
|
||||
- `git_custom_endpoint` (String) Custom endpoint for the Git service
|
||||
- `iaas_custom_endpoint` (String) Custom endpoint for the IaaS service
|
||||
- `kms_custom_endpoint` (String) Custom endpoint for the KMS service
|
||||
- `loadbalancer_custom_endpoint` (String) Custom endpoint for the Load Balancer service
|
||||
- `logme_custom_endpoint` (String) Custom endpoint for the LogMe service
|
||||
- `mariadb_custom_endpoint` (String) Custom endpoint for the MariaDB service
|
||||
- `modelserving_custom_endpoint` (String) Custom endpoint for the AI Model Serving service
|
||||
- `mongodbflex_custom_endpoint` (String) Custom endpoint for the MongoDB Flex service
|
||||
- `objectstorage_custom_endpoint` (String) Custom endpoint for the Object Storage service
|
||||
- `observability_custom_endpoint` (String) Custom endpoint for the Observability service
|
||||
- `opensearch_custom_endpoint` (String) Custom endpoint for the OpenSearch service
|
||||
- `postgresflex_custom_endpoint` (String) Custom endpoint for the PostgresFlex service
|
||||
- `private_key` (String) Private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||
- `private_key_path` (String) Path for the private RSA key used for authentication, relevant for the key flow. It takes precedence over the private key that is included in the service account key.
|
||||
- `rabbitmq_custom_endpoint` (String) Custom endpoint for the RabbitMQ service
|
||||
- `redis_custom_endpoint` (String) Custom endpoint for the Redis service
|
||||
- `region` (String, Deprecated) Region will be used as the default location for regional services. Not all services require a region, some are global
|
||||
- `resourcemanager_custom_endpoint` (String) Custom endpoint for the Resource Manager service
|
||||
- `scf_custom_endpoint` (String) Custom endpoint for the Cloud Foundry (SCF) service
|
||||
- `secretsmanager_custom_endpoint` (String) Custom endpoint for the Secrets Manager service
|
||||
- `server_backup_custom_endpoint` (String) Custom endpoint for the Server Backup service
|
||||
- `server_update_custom_endpoint` (String) Custom endpoint for the Server Update service
|
||||
- `service_account_custom_endpoint` (String) Custom endpoint for the Service Account service
|
||||
- `service_account_email` (String, Deprecated) Service account email. It can also be set using the environment variable STACKIT_SERVICE_ACCOUNT_EMAIL. It is required if you want to use the resource manager project resource.
|
||||
- `service_account_key` (String) Service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||
- `service_account_key_path` (String) Path for the service account key used for authentication. If set, the key flow will be used to authenticate all operations.
|
||||
- `service_account_token` (String, Deprecated) Token used for authentication. If set, the token flow will be used to authenticate all operations.
|
||||
- `service_enablement_custom_endpoint` (String) Custom endpoint for the Service Enablement API
|
||||
- `ske_custom_endpoint` (String) Custom endpoint for the Kubernetes Engine (SKE) service
|
||||
- `sqlserverflex_custom_endpoint` (String) Custom endpoint for the SQL Server Flex service
|
||||
- `token_custom_endpoint` (String) Custom endpoint for the token API, which is used to request access tokens when using the key flow
|
||||
|
|
@ -1,57 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_database Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_database (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_postgresflexalpha_database" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "mydb"
|
||||
owner = "myusername"
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing postgresflex database
|
||||
import {
|
||||
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.postgres_database_id}"
|
||||
}
|
||||
|
||||
import {
|
||||
to = stackitprivatepreview_postgresflexalpha_database.import-example
|
||||
identity = {
|
||||
project_id = "project_id"
|
||||
region = "region"
|
||||
instance_id = "instance_id"
|
||||
database_id = "database_id"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `name` (String) The name of the database.
|
||||
|
||||
### Optional
|
||||
|
||||
- `database_id` (Number) The ID of the database.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `owner` (String) The owner of the database.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `id` (Number) The id of the database.
|
||||
|
|
@ -1,131 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_instance Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_instance (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "example-instance"
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
backup_schedule = "0 0 * * *"
|
||||
retention_days = 30
|
||||
flavor_id = "flavor.id"
|
||||
replicas = 1
|
||||
storage = {
|
||||
performance_class = "premium-perf2-stackit"
|
||||
size = 10
|
||||
}
|
||||
encryption = {
|
||||
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
kek_key_version = 1
|
||||
service_account = "service@account.email"
|
||||
}
|
||||
network = {
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
access_scope = "PUBLIC"
|
||||
}
|
||||
version = 17
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing postgresflex instance
|
||||
import {
|
||||
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||
id = "${var.project_id},${var.region},${var.postgres_instance_id}"
|
||||
}
|
||||
|
||||
import {
|
||||
to = stackitprivatepreview_postgresflexalpha_instance.import-example
|
||||
identity = {
|
||||
project_id = var.project_id
|
||||
region = var.region
|
||||
instance_id = var.postgres_instance_id
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `name` (String) The name of the instance.
|
||||
- `network` (Attributes) The access configuration of the instance (see [below for nested schema](#nestedatt--network))
|
||||
- `replicas` (Number) How many replicas the instance should have.
|
||||
- `retention_days` (Number) How long backups are retained. The value can only be between 32 and 365 days.
|
||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||
- `version` (String) The Postgres version used for the instance. See [Versions Endpoint](/documentation/postgres-flex-service/version/v3alpha1#tag/Version) for supported version parameters.
|
||||
|
||||
### Optional
|
||||
|
||||
- `encryption` (Attributes) The configuration for instance's volume and backup storage encryption.
|
||||
|
||||
⚠ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected. (see [below for nested schema](#nestedatt--encryption))
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
- `connection_info` (Attributes) The DNS name and port in the instance overview (see [below for nested schema](#nestedatt--connection_info))
|
||||
- `id` (String) The ID of the instance.
|
||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||
- `status` (String) The current status of the instance.
|
||||
|
||||
<a id="nestedatt--network"></a>
|
||||
### Nested Schema for `network`
|
||||
|
||||
Required:
|
||||
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
|
||||
Optional:
|
||||
|
||||
- `access_scope` (String) The access scope of the instance. It defines if the instance is public or airgapped.
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `instance_address` (String)
|
||||
- `router_address` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--storage"></a>
|
||||
### Nested Schema for `storage`
|
||||
|
||||
Required:
|
||||
|
||||
- `performance_class` (String) The storage class for the storage.
|
||||
- `size` (Number) The storage size in Gigabytes.
|
||||
|
||||
|
||||
<a id="nestedatt--encryption"></a>
|
||||
### Nested Schema for `encryption`
|
||||
|
||||
Required:
|
||||
|
||||
- `kek_key_id` (String) The encryption-key key identifier
|
||||
- `kek_key_ring_id` (String) The encryption-key keyring identifier
|
||||
- `kek_key_version` (String) The encryption-key version
|
||||
- `service_account` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--connection_info"></a>
|
||||
### Nested Schema for `connection_info`
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `host` (String) The host of the instance.
|
||||
- `port` (Number) The port of the instance.
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_postgresflexalpha_user Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_postgresflexalpha_user (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "username"
|
||||
roles = ["role"]
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing postgresflex user
|
||||
import {
|
||||
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||
id = "${var.project_id},${var.region},${var.postgres_instance_id},${var.user_id}"
|
||||
}
|
||||
|
||||
import {
|
||||
to = stackitprivatepreview_postgresflexalpha_user.import-example
|
||||
identity = {
|
||||
project_id = "project.id"
|
||||
region = "region"
|
||||
instance_id = "instance.id"
|
||||
user_id = "user.id"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `name` (String) The name of the user.
|
||||
|
||||
### Optional
|
||||
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
- `roles` (List of String) A list containing the user roles for the instance.
|
||||
- `user_id` (Number) The ID of the user.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `id` (Number) The ID of the user.
|
||||
- `password` (String) The password for the user.
|
||||
- `status` (String) The current status of the user.
|
||||
|
|
@ -1,63 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_database Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_database (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_sqlserverflexalpha_database" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
collation = ""
|
||||
compatibility = "160"
|
||||
name = ""
|
||||
owner = ""
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing sqlserverflex database
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||
}
|
||||
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_database.import-example
|
||||
identity = {
|
||||
project_id = "project.id"
|
||||
region = "region"
|
||||
instance_id = "instance.id"
|
||||
database_id = "database.id"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `name` (String) The name of the database.
|
||||
- `owner` (String) The owner of the database.
|
||||
|
||||
### Optional
|
||||
|
||||
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||
- `database_name` (String) The name of the database.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||
- `id` (Number) The id of the database.
|
||||
|
|
@ -1,103 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_instance Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_instance (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_sqlserverflexalpha_instance" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "example-instance"
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
backup_schedule = "00 00 * * *"
|
||||
flavor = {
|
||||
cpu = 4
|
||||
ram = 16
|
||||
}
|
||||
storage = {
|
||||
class = "class"
|
||||
size = 5
|
||||
}
|
||||
version = 2022
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
||||
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `name` (String) The name of the instance.
|
||||
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||
- `retention_days` (Number) The number of days backup files are stored before being cleaned up. Valid range: 30 to 365.
|
||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||
- `version` (String) The sqlserver version used for the instance.
|
||||
|
||||
### Optional
|
||||
|
||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `edition` (String) Edition of the MSSQL server instance
|
||||
- `id` (String) The ID of the instance.
|
||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||
- `replicas` (Number) How many replicas the instance should have.
|
||||
- `status` (String)
|
||||
|
||||
<a id="nestedatt--network"></a>
|
||||
### Nested Schema for `network`
|
||||
|
||||
Required:
|
||||
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
|
||||
Optional:
|
||||
|
||||
- `access_scope` (String) The network access scope of the instance
|
||||
|
||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `instance_address` (String)
|
||||
- `router_address` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--storage"></a>
|
||||
### Nested Schema for `storage`
|
||||
|
||||
Required:
|
||||
|
||||
- `class` (String) The storage class for the storage.
|
||||
- `size` (Number) The storage size in Gigabytes.
|
||||
|
||||
|
||||
<a id="nestedatt--encryption"></a>
|
||||
### Nested Schema for `encryption`
|
||||
|
||||
Required:
|
||||
|
||||
- `kek_key_id` (String) The key identifier
|
||||
- `kek_key_ring_id` (String) The keyring identifier
|
||||
- `kek_key_version` (String) The key version
|
||||
- `service_account` (String)
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexalpha_user Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexalpha_user (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
username = "username"
|
||||
roles = ["role"]
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||
- `username` (String) The name of the user.
|
||||
|
||||
### Optional
|
||||
|
||||
- `default_database` (String) The default database for a user of the instance.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
- `user_id` (Number) The ID of the user.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `host` (String) The host of the instance to which the user belongs.
|
||||
- `id` (Number) The ID of the user.
|
||||
- `password` (String) The password for the user.
|
||||
- `port` (Number) The port of the instance to which the user belongs.
|
||||
- `status` (String) The current status of the user.
|
||||
- `uri` (String) The connection string for the user to the instance.
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_database Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_database (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
username = "username"
|
||||
roles = ["role"]
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `name` (String) The name of the database.
|
||||
- `owner` (String) The owner of the database.
|
||||
|
||||
### Optional
|
||||
|
||||
- `collation` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||
- `compatibility` (Number) CompatibilityLevel of the Database.
|
||||
- `database_name` (String) The name of the database.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `collation_name` (String) The collation of the database. This database collation should match the *collation_name* of one of the collations given by the **Get database collation list** endpoint.
|
||||
- `compatibility_level` (Number) CompatibilityLevel of the Database.
|
||||
- `id` (Number) The id of the database.
|
||||
|
|
@ -1,158 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_instance Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_instance (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
# without encryption and SNA
|
||||
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "example-instance"
|
||||
backup_schedule = "0 3 * * *"
|
||||
retention_days = 31
|
||||
flavor_id = "flavor_id"
|
||||
storage = {
|
||||
class = "premium-perf2-stackit"
|
||||
size = 50
|
||||
}
|
||||
version = 2022
|
||||
network = {
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
access_scope = "SNA"
|
||||
}
|
||||
}
|
||||
|
||||
# without encryption and PUBLIC
|
||||
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "example-instance"
|
||||
backup_schedule = "0 3 * * *"
|
||||
retention_days = 31
|
||||
flavor_id = "flavor_id"
|
||||
storage = {
|
||||
class = "premium-perf2-stackit"
|
||||
size = 50
|
||||
}
|
||||
version = 2022
|
||||
network = {
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
access_scope = "PUBLIC"
|
||||
}
|
||||
}
|
||||
|
||||
# with encryption and SNA
|
||||
resource "stackitprivatepreview_sqlserverflexbeta_instance" "instance" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "example-instance"
|
||||
backup_schedule = "0 3 * * *"
|
||||
retention_days = 31
|
||||
flavor_id = "flavor_id"
|
||||
storage = {
|
||||
class = "premium-perf2-stackit"
|
||||
size = 50
|
||||
}
|
||||
version = 2022
|
||||
encryption = {
|
||||
kek_key_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
kek_key_ring_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
kek_key_version = 1
|
||||
service_account = "service_account@email"
|
||||
}
|
||||
network = {
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
access_scope = "SNA"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Only use the import statement, if you want to import an existing sqlserverflex instance
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
||||
id = "${var.project_id},${var.region},${var.sql_instance_id}"
|
||||
}
|
||||
|
||||
# import with identity
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_instance.import-example
|
||||
identity = {
|
||||
project_id = var.project_id
|
||||
region = var.region
|
||||
instance_id = var.sql_instance_id
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `backup_schedule` (String) The schedule for on what time and how often the database backup will be created. The schedule is written as a cron schedule.
|
||||
- `flavor_id` (String) The id of the instance flavor.
|
||||
- `name` (String) The name of the instance.
|
||||
- `network` (Attributes) the network configuration of the instance. (see [below for nested schema](#nestedatt--network))
|
||||
- `retention_days` (Number) The number of days backup files are stored before being cleaned up. Valid range: 30 to 365.
|
||||
- `storage` (Attributes) The object containing information about the storage size and class. (see [below for nested schema](#nestedatt--storage))
|
||||
- `version` (String) The sqlserver version used for the instance.
|
||||
|
||||
### Optional
|
||||
|
||||
- `encryption` (Attributes) this defines which key to use for storage encryption (see [below for nested schema](#nestedatt--encryption))
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `edition` (String) Edition of the MSSQL server instance
|
||||
- `id` (String) The ID of the instance.
|
||||
- `is_deletable` (Boolean) Whether the instance can be deleted or not.
|
||||
- `replicas` (Number) How many replicas the instance should have.
|
||||
- `status` (String)
|
||||
|
||||
<a id="nestedatt--network"></a>
|
||||
### Nested Schema for `network`
|
||||
|
||||
Required:
|
||||
|
||||
- `acl` (List of String) List of IPV4 cidr.
|
||||
|
||||
Optional:
|
||||
|
||||
- `access_scope` (String) The network access scope of the instance
|
||||
|
||||
⚠️ **Note:** This feature is in private preview. Supplying this object is only permitted for enabled accounts. If your account does not have access, the request will be rejected.
|
||||
|
||||
Read-Only:
|
||||
|
||||
- `instance_address` (String)
|
||||
- `router_address` (String)
|
||||
|
||||
|
||||
<a id="nestedatt--storage"></a>
|
||||
### Nested Schema for `storage`
|
||||
|
||||
Required:
|
||||
|
||||
- `class` (String) The storage class for the storage.
|
||||
- `size` (Number) The storage size in Gigabytes.
|
||||
|
||||
|
||||
<a id="nestedatt--encryption"></a>
|
||||
### Nested Schema for `encryption`
|
||||
|
||||
Required:
|
||||
|
||||
- `kek_key_id` (String) The key identifier
|
||||
- `kek_key_ring_id` (String) The keyring identifier
|
||||
- `kek_key_version` (String) The key version
|
||||
- `service_account` (String)
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
---
|
||||
# generated by https://github.com/hashicorp/terraform-plugin-docs
|
||||
page_title: "stackitprivatepreview_sqlserverflexbeta_user Resource - stackitprivatepreview"
|
||||
subcategory: ""
|
||||
description: |-
|
||||
|
||||
---
|
||||
|
||||
# stackitprivatepreview_sqlserverflexbeta_user (Resource)
|
||||
|
||||
|
||||
|
||||
## Example Usage
|
||||
|
||||
```terraform
|
||||
resource "stackitprivatepreview_sqlserverflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
username = "username"
|
||||
roles = ["role"]
|
||||
}
|
||||
|
||||
# Only use the import statement, if you want to import an existing sqlserverflex user
|
||||
import {
|
||||
to = stackitprivatepreview_sqlserverflexalpha_user.import-example
|
||||
id = "${var.project_id},${var.region},${var.sql_instance_id},${var.sql_user_id}"
|
||||
}
|
||||
```
|
||||
|
||||
<!-- schema generated by tfplugindocs -->
|
||||
## Schema
|
||||
|
||||
### Required
|
||||
|
||||
- `roles` (List of String) A list containing the user roles for the instance. A list with the valid user roles can be retrieved using the List Roles endpoint.
|
||||
- `username` (String) The name of the user.
|
||||
|
||||
### Optional
|
||||
|
||||
- `default_database` (String) The default database for a user of the instance.
|
||||
- `instance_id` (String) The ID of the instance.
|
||||
- `project_id` (String) The STACKIT project ID.
|
||||
- `region` (String) The region which should be addressed
|
||||
- `user_id` (Number) The ID of the user.
|
||||
|
||||
### Read-Only
|
||||
|
||||
- `host` (String) The host of the instance in which the user belongs to.
|
||||
- `id` (Number) The ID of the user.
|
||||
- `password` (String) The password for the user.
|
||||
- `port` (Number) The port of the instance in which the user belongs to.
|
||||
- `status` (String) The current status of the user.
|
||||
- `uri` (String) The connection string for the user to the instance.
|
||||
|
|
@ -17,7 +17,7 @@ resource "stackitprivatepreview_postgresflexalpha_instance" "example-instance" {
|
|||
service_account = "service@account.email"
|
||||
}
|
||||
network = {
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
acl = ["XXX.XXX.XXX.X/XX", "XX.XXX.XX.X/XX"]
|
||||
access_scope = "PUBLIC"
|
||||
}
|
||||
version = 17
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
resource "stackitprivatepreview_postgresflexalpha_user" "example" {
|
||||
project_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
instance_id = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
|
||||
name = "username"
|
||||
name = "username"
|
||||
roles = ["role"]
|
||||
}
|
||||
|
||||
|
|
|
|||
233
go.mod
233
go.mod
|
|
@ -2,10 +2,17 @@ module tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stac
|
|||
|
||||
go 1.25.6
|
||||
|
||||
|
||||
|
||||
require (
|
||||
github.com/SladkyCitron/slogcolor v1.8.0
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
||||
github.com/golangci/golangci-lint/v2 v2.10.1
|
||||
github.com/google/go-cmp v0.7.0
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/hashicorp/terraform-plugin-codegen-framework v0.4.1
|
||||
github.com/hashicorp/terraform-plugin-codegen-openapi v0.3.0
|
||||
github.com/hashicorp/terraform-plugin-docs v0.24.0
|
||||
github.com/hashicorp/terraform-plugin-framework v1.17.0
|
||||
github.com/hashicorp/terraform-plugin-framework-validators v0.19.0
|
||||
github.com/hashicorp/terraform-plugin-go v0.29.0
|
||||
|
|
@ -19,71 +26,277 @@ require (
|
|||
github.com/spf13/cobra v1.10.2
|
||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1
|
||||
github.com/stackitcloud/stackit-sdk-go/services/iaasalpha v0.1.23-alpha
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/teambition/rrule-go v1.8.2
|
||||
golang.org/x/tools v0.42.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20260213145524-e0ab670178e1 // indirect
|
||||
)
|
||||
require github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
|
||||
|
||||
require (
|
||||
4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
|
||||
4d63.com/gochecknoglobals v0.2.2 // indirect
|
||||
codeberg.org/chavacava/garif v0.2.0 // indirect
|
||||
codeberg.org/polyfloyd/go-errorlint v1.9.0 // indirect
|
||||
dario.cat/mergo v1.0.1 // indirect
|
||||
dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect
|
||||
dev.gaijin.team/go/golib v0.6.0 // indirect
|
||||
github.com/4meepo/tagalign v1.4.3 // indirect
|
||||
github.com/Abirdcfly/dupword v0.1.7 // indirect
|
||||
github.com/AdminBenni/iota-mixing v1.0.0 // indirect
|
||||
github.com/AlwxSin/noinlineerr v1.0.5 // indirect
|
||||
github.com/Antonboom/errname v1.1.1 // indirect
|
||||
github.com/Antonboom/nilnil v1.1.1 // indirect
|
||||
github.com/Antonboom/testifylint v1.6.4 // indirect
|
||||
github.com/BurntSushi/toml v1.6.0 // indirect
|
||||
github.com/Djarvur/go-err113 v0.1.1 // indirect
|
||||
github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
|
||||
github.com/Masterminds/goutils v1.1.1 // indirect
|
||||
github.com/Masterminds/semver/v3 v3.4.0 // indirect
|
||||
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
|
||||
github.com/MirrexOne/unqueryvet v1.5.3 // indirect
|
||||
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
||||
github.com/agext/levenshtein v1.2.3 // indirect
|
||||
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
|
||||
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
|
||||
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
|
||||
github.com/alexkohler/prealloc v1.0.2 // indirect
|
||||
github.com/alfatraining/structtag v1.0.0 // indirect
|
||||
github.com/alingse/asasalint v0.0.11 // indirect
|
||||
github.com/alingse/nilnesserr v0.2.0 // indirect
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
|
||||
github.com/armon/go-radix v1.0.0 // indirect
|
||||
github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect
|
||||
github.com/ashanbrown/makezero/v2 v2.1.0 // indirect
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/bgentry/speakeasy v0.1.0 // indirect
|
||||
github.com/bkielbasa/cyclop v1.2.3 // indirect
|
||||
github.com/blizzy78/varnamelen v0.8.0 // indirect
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
|
||||
github.com/bombsimon/wsl/v4 v4.7.0 // indirect
|
||||
github.com/bombsimon/wsl/v5 v5.6.0 // indirect
|
||||
github.com/breml/bidichk v0.3.3 // indirect
|
||||
github.com/breml/errchkjson v0.4.1 // indirect
|
||||
github.com/buger/jsonparser v1.1.1 // indirect
|
||||
github.com/butuzov/ireturn v0.4.0 // indirect
|
||||
github.com/butuzov/mirror v1.3.0 // indirect
|
||||
github.com/catenacyber/perfsprint v0.10.1 // indirect
|
||||
github.com/ccojocar/zxcvbn-go v1.0.4 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/charithe/durationcheck v0.0.11 // indirect
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
|
||||
github.com/charmbracelet/lipgloss v1.1.0 // indirect
|
||||
github.com/charmbracelet/x/ansi v0.10.1 // indirect
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
|
||||
github.com/charmbracelet/x/term v0.2.1 // indirect
|
||||
github.com/ckaznocha/intrange v0.3.1 // indirect
|
||||
github.com/cloudflare/circl v1.6.3 // indirect
|
||||
github.com/curioswitch/go-reassign v0.3.0 // indirect
|
||||
github.com/daixiang0/gci v0.13.7 // indirect
|
||||
github.com/dave/dst v0.27.3 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/denis-tingaikin/go-header v0.5.0 // indirect
|
||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||
github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect
|
||||
github.com/ettle/strcase v0.2.0 // indirect
|
||||
github.com/fatih/color v1.18.0 // indirect
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||
github.com/fatih/structtag v1.2.0 // indirect
|
||||
github.com/firefart/nonamedreturns v1.0.6 // indirect
|
||||
github.com/fsnotify/fsnotify v1.5.4 // indirect
|
||||
github.com/fzipp/gocyclo v0.6.0 // indirect
|
||||
github.com/ghostiam/protogetter v0.3.20 // indirect
|
||||
github.com/go-critic/go-critic v0.14.3 // indirect
|
||||
github.com/go-toolsmith/astcast v1.1.0 // indirect
|
||||
github.com/go-toolsmith/astcopy v1.1.0 // indirect
|
||||
github.com/go-toolsmith/astequal v1.2.0 // indirect
|
||||
github.com/go-toolsmith/astfmt v1.1.0 // indirect
|
||||
github.com/go-toolsmith/astp v1.1.0 // indirect
|
||||
github.com/go-toolsmith/strparse v1.1.0 // indirect
|
||||
github.com/go-toolsmith/typep v1.1.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
||||
github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
|
||||
github.com/gobwas/glob v0.2.3 // indirect
|
||||
github.com/godoc-lint/godoc-lint v0.11.2 // indirect
|
||||
github.com/gofrs/flock v0.13.0 // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/golangci/asciicheck v0.5.0 // indirect
|
||||
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
|
||||
github.com/golangci/go-printf-func-name v0.1.1 // indirect
|
||||
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
|
||||
github.com/golangci/golines v0.15.0 // indirect
|
||||
github.com/golangci/misspell v0.8.0 // indirect
|
||||
github.com/golangci/plugin-module-register v0.1.2 // indirect
|
||||
github.com/golangci/revgrep v0.8.0 // indirect
|
||||
github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect
|
||||
github.com/golangci/unconvert v0.0.0-20250410112200-a129a6e6413e // indirect
|
||||
github.com/gordonklaus/ineffassign v0.2.0 // indirect
|
||||
github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
|
||||
github.com/gostaticanalysis/comment v1.5.0 // indirect
|
||||
github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
|
||||
github.com/gostaticanalysis/nilerr v0.1.2 // indirect
|
||||
github.com/hashicorp/cli v1.1.7 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-cty v1.5.0 // indirect
|
||||
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
||||
github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-plugin v1.7.0 // indirect
|
||||
github.com/hashicorp/go-uuid v1.0.3 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/hc-install v0.9.3 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/hashicorp/hcl/v2 v2.24.0 // indirect
|
||||
github.com/hashicorp/logutils v1.0.0 // indirect
|
||||
github.com/hashicorp/terraform-exec v0.25.0 // indirect
|
||||
github.com/hashicorp/terraform-json v0.27.2 // indirect
|
||||
github.com/hashicorp/terraform-plugin-codegen-spec v0.2.0 // indirect
|
||||
github.com/hashicorp/terraform-plugin-sdk/v2 v2.38.2 // indirect
|
||||
github.com/hashicorp/terraform-registry-address v0.4.0 // indirect
|
||||
github.com/hashicorp/terraform-svchost v0.2.0 // indirect
|
||||
github.com/hashicorp/yamux v0.1.2 // indirect
|
||||
github.com/hexops/gotextdiff v1.0.3 // indirect
|
||||
github.com/huandu/xstrings v1.4.0 // indirect
|
||||
github.com/imdario/mergo v0.3.16 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
github.com/jgautheron/goconst v1.8.2 // indirect
|
||||
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
|
||||
github.com/jjti/go-spancheck v0.6.5 // indirect
|
||||
github.com/julz/importas v0.2.0 // indirect
|
||||
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
|
||||
github.com/kisielk/errcheck v1.9.0 // indirect
|
||||
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
|
||||
github.com/kulti/thelper v0.7.1 // indirect
|
||||
github.com/kunwardeep/paralleltest v1.0.15 // indirect
|
||||
github.com/lasiar/canonicalheader v1.1.2 // indirect
|
||||
github.com/ldez/exptostd v0.4.5 // indirect
|
||||
github.com/ldez/gomoddirectives v0.8.0 // indirect
|
||||
github.com/ldez/grignotin v0.10.1 // indirect
|
||||
github.com/ldez/structtags v0.6.1 // indirect
|
||||
github.com/ldez/tagliatelle v0.7.2 // indirect
|
||||
github.com/ldez/usetesting v0.5.0 // indirect
|
||||
github.com/leonklingele/grouper v1.1.2 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||
github.com/macabu/inamedparam v0.2.0 // indirect
|
||||
github.com/magiconair/properties v1.8.6 // indirect
|
||||
github.com/mailru/easyjson v0.7.7 // indirect
|
||||
github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect
|
||||
github.com/manuelarte/funcorder v0.5.0 // indirect
|
||||
github.com/maratori/testableexamples v1.0.1 // indirect
|
||||
github.com/maratori/testpackage v1.1.2 // indirect
|
||||
github.com/matoous/godox v1.1.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
|
||||
github.com/mgechev/revive v1.14.0 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
github.com/moricho/tparallel v0.3.2 // indirect
|
||||
github.com/muesli/termenv v0.16.0 // indirect
|
||||
github.com/nakabonne/nestif v0.3.1 // indirect
|
||||
github.com/nishanths/exhaustive v0.12.0 // indirect
|
||||
github.com/nishanths/predeclared v0.2.2 // indirect
|
||||
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
|
||||
github.com/oklog/run v1.2.0 // indirect
|
||||
github.com/pb33f/libopenapi v0.15.0 // indirect
|
||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/posener/complete v1.2.3 // indirect
|
||||
github.com/prometheus/client_golang v1.12.1 // indirect
|
||||
github.com/prometheus/client_model v0.2.0 // indirect
|
||||
github.com/prometheus/common v0.32.1 // indirect
|
||||
github.com/prometheus/procfs v0.7.3 // indirect
|
||||
github.com/quasilyte/go-ruleguard v0.4.5 // indirect
|
||||
github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect
|
||||
github.com/quasilyte/gogrep v0.5.0 // indirect
|
||||
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
|
||||
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
|
||||
github.com/raeperd/recvcheck v0.2.0 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||
github.com/ryancurrah/gomodguard v1.4.1 // indirect
|
||||
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
|
||||
github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
|
||||
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
|
||||
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
|
||||
github.com/securego/gosec/v2 v2.23.0 // indirect
|
||||
github.com/shopspring/decimal v1.3.1 // indirect
|
||||
github.com/sirupsen/logrus v1.9.4 // indirect
|
||||
github.com/sivchari/containedctx v1.0.3 // indirect
|
||||
github.com/sonatard/noctx v0.4.0 // indirect
|
||||
github.com/sourcegraph/go-diff v0.7.0 // indirect
|
||||
github.com/spf13/afero v1.15.0 // indirect
|
||||
github.com/spf13/cast v1.5.1 // indirect
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/spf13/pflag v1.0.10 // indirect
|
||||
github.com/spf13/viper v1.12.0 // indirect
|
||||
github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
|
||||
github.com/stbenjam/no-sprintf-host-port v0.3.1 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/stretchr/testify v1.11.1 // indirect
|
||||
github.com/subosito/gotenv v1.4.1 // indirect
|
||||
github.com/tetafro/godot v1.5.4 // indirect
|
||||
github.com/timakin/bodyclose v0.0.0-20241222091800-1db5c5ca4d67 // indirect
|
||||
github.com/timonwong/loggercheck v0.11.0 // indirect
|
||||
github.com/tomarrell/wrapcheck/v2 v2.12.0 // indirect
|
||||
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
|
||||
github.com/ultraware/funlen v0.2.0 // indirect
|
||||
github.com/ultraware/whitespace v0.2.0 // indirect
|
||||
github.com/uudashr/gocognit v1.2.0 // indirect
|
||||
github.com/uudashr/iface v1.4.1 // indirect
|
||||
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
|
||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
github.com/xeipuuv/gojsonschema v1.2.0 // indirect
|
||||
github.com/xen0n/gosmopolitan v1.3.0 // indirect
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
||||
github.com/yagipy/maintidx v1.0.0 // indirect
|
||||
github.com/yeya24/promlinter v0.3.0 // indirect
|
||||
github.com/ykadowak/zerologlint v0.1.5 // indirect
|
||||
github.com/yuin/goldmark v1.7.7 // indirect
|
||||
github.com/yuin/goldmark-meta v1.1.0 // indirect
|
||||
github.com/zclconf/go-cty v1.17.0 // indirect
|
||||
gitlab.com/bosi/decorder v0.4.2 // indirect
|
||||
go-simpler.org/musttag v0.14.0 // indirect
|
||||
go-simpler.org/sloglint v0.11.1 // indirect
|
||||
go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
|
||||
go.augendre.info/arangolint v0.4.0 // indirect
|
||||
go.augendre.info/fatcontext v0.9.0 // indirect
|
||||
go.uber.org/multierr v1.10.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.48.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect
|
||||
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
|
||||
golang.org/x/mod v0.33.0 // indirect
|
||||
golang.org/x/net v0.50.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 // indirect
|
||||
golang.org/x/text v0.34.0 // indirect
|
||||
golang.org/x/tools v0.42.0 // indirect
|
||||
google.golang.org/appengine v1.6.8 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
|
||||
google.golang.org/grpc v1.79.1 // indirect
|
||||
google.golang.org/protobuf v1.36.11 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
honnef.co/go/tools v0.7.0 // indirect
|
||||
mvdan.cc/gofumpt v0.9.2 // indirect
|
||||
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
|
||||
)
|
||||
|
||||
tool golang.org/x/tools/cmd/goimports
|
||||
|
|
|
|||
|
|
@ -2,6 +2,13 @@
|
|||
version: "2"
|
||||
run:
|
||||
concurrency: 4
|
||||
output:
|
||||
formats:
|
||||
text:
|
||||
print-linter-name: true
|
||||
print-issued-lines: true
|
||||
colors: true
|
||||
path: stdout
|
||||
linters:
|
||||
enable:
|
||||
- bodyclose
|
||||
|
|
@ -68,6 +75,10 @@ linters:
|
|||
- name: empty-lines
|
||||
- name: early-return
|
||||
exclusions:
|
||||
paths:
|
||||
- stackit-sdk-generator/
|
||||
- generated/
|
||||
- pkg_gen/
|
||||
generated: lax
|
||||
warn-unused: true
|
||||
# Excluding configuration per-path, per-linter, per-text and per-source.
|
||||
|
|
@ -76,14 +87,6 @@ linters:
|
|||
- path: _test\.go
|
||||
linters:
|
||||
- gochecknoinits
|
||||
paths:
|
||||
- third_party/
|
||||
- builtin/
|
||||
- examples/
|
||||
- tools/copy.go
|
||||
- tools/main.go
|
||||
- pkg_gen/
|
||||
- cmd/
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
|
|
@ -91,12 +94,4 @@ formatters:
|
|||
settings:
|
||||
goimports:
|
||||
local-prefixes:
|
||||
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party/
|
||||
- builtin/
|
||||
- examples/
|
||||
- pkg_gen/
|
||||
- cmd/
|
||||
- tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview
|
||||
|
|
@ -20,13 +20,20 @@ func TestName() string {
|
|||
}
|
||||
|
||||
func ActivateEnvironmentHttpMocks() {
|
||||
httpmock.RegisterNoResponder(func(req *http.Request) (*http.Response, error) {
|
||||
return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
|
||||
})
|
||||
|
||||
httpmock.RegisterRegexpResponder("GET", regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
|
||||
httpmock.RegisterNoResponder(
|
||||
func(req *http.Request) (*http.Response, error) {
|
||||
return httpmock.NewStringResponse(http.StatusOK, httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String()), nil
|
||||
})
|
||||
return nil, fmt.Errorf("no responder found for %s %s, please check your http mocks", req.Method, req.URL)
|
||||
},
|
||||
)
|
||||
|
||||
httpmock.RegisterRegexpResponder(
|
||||
"GET",
|
||||
regexp.MustCompile(`^https://api\.bap\.microsoft\.com/providers/Microsoft\.BusinessAppPlatform/locations/(europe|unitedstates)/environmentLanguages\?api-version=2023-06-01$`),
|
||||
func(_ *http.Request) (*http.Response, error) {
|
||||
return httpmock.NewStringResponse(
|
||||
http.StatusOK,
|
||||
httpmock.File("../../services/languages/tests/datasource/Validate_Read/get_languages.json").String(),
|
||||
), nil
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,19 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# This script lints the SDK modules and the internal examples
|
||||
# Pre-requisites: golangci-lint
|
||||
set -eo pipefail
|
||||
|
||||
ROOT_DIR=$(git rev-parse --show-toplevel)
|
||||
GOLANG_CI_YAML_PATH="${ROOT_DIR}/golang-ci.yaml"
|
||||
GOLANG_CI_ARGS="--allow-parallel-runners --timeout=5m --config=${GOLANG_CI_YAML_PATH}"
|
||||
|
||||
if type -p golangci-lint >/dev/null; then
|
||||
:
|
||||
else
|
||||
echo "golangci-lint not installed, unable to proceed."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd ${ROOT_DIR}
|
||||
golangci-lint run ${GOLANG_CI_ARGS}
|
||||
|
|
@ -17,11 +17,7 @@ elif [ "$action" = "tools" ]; then
|
|||
|
||||
go mod download
|
||||
|
||||
# go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.0
|
||||
go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
|
||||
|
||||
# go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.21.0
|
||||
go install github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs@v0.24.0
|
||||
go install golang.org/x/tools/cmd/goimports@v0.42.0
|
||||
else
|
||||
echo "Invalid action: '$action', please use $0 help for help"
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -14,5 +14,5 @@ fi
|
|||
mkdir -p ${ROOT_DIR}/docs
|
||||
|
||||
echo ">> Generating documentation"
|
||||
tfplugindocs generate \
|
||||
go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate \
|
||||
--provider-name "stackitprivatepreview"
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ const (
|
|||
type EphemeralProviderData struct {
|
||||
ProviderData
|
||||
|
||||
PrivateKey string
|
||||
PrivateKey string //nolint:gosec //this is a placeholder and not used in this code
|
||||
PrivateKeyPath string
|
||||
ServiceAccountKey string
|
||||
ServiceAccountKeyPath string
|
||||
|
|
@ -105,11 +105,13 @@ func DiagsToError(diags diag.Diagnostics) error {
|
|||
diagsError := diags.Errors()
|
||||
diagsStrings := make([]string, 0)
|
||||
for _, diagnostic := range diagsError {
|
||||
diagsStrings = append(diagsStrings, fmt.Sprintf(
|
||||
"(%s) %s",
|
||||
diagnostic.Summary(),
|
||||
diagnostic.Detail(),
|
||||
))
|
||||
diagsStrings = append(
|
||||
diagsStrings, fmt.Sprintf(
|
||||
"(%s) %s",
|
||||
diagnostic.Summary(),
|
||||
diagnostic.Detail(),
|
||||
),
|
||||
)
|
||||
}
|
||||
return fmt.Errorf("%s", strings.Join(diagsStrings, ";"))
|
||||
}
|
||||
|
|
@ -136,14 +138,22 @@ func LogAndAddWarning(ctx context.Context, diags *diag.Diagnostics, summary, det
|
|||
|
||||
func LogAndAddWarningBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
|
||||
warnTitle := fmt.Sprintf("The %s %q is in beta", resourceType, name)
|
||||
warnContent := fmt.Sprintf("The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.", resourceType, name)
|
||||
warnContent := fmt.Sprintf(
|
||||
"The %s %q is in beta and may be subject to breaking changes in the future. Use with caution.",
|
||||
resourceType,
|
||||
name,
|
||||
)
|
||||
tflog.Warn(ctx, fmt.Sprintf("%s | %s", warnTitle, warnContent))
|
||||
diags.AddWarning(warnTitle, warnContent)
|
||||
}
|
||||
|
||||
func LogAndAddErrorBeta(ctx context.Context, diags *diag.Diagnostics, name string, resourceType ResourceType) {
|
||||
errTitle := fmt.Sprintf("The %s %q is in beta and beta is not enabled", resourceType, name)
|
||||
errContent := fmt.Sprintf(`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`, resourceType, name)
|
||||
errContent := fmt.Sprintf(
|
||||
`The %s %q is in beta and the beta functionality is currently not enabled. To enable it, set the environment variable STACKIT_TF_ENABLE_BETA_RESOURCES to "true" or set the "enable_beta_resources" provider field to true.`,
|
||||
resourceType,
|
||||
name,
|
||||
)
|
||||
tflog.Error(ctx, fmt.Sprintf("%s | %s", errTitle, errContent))
|
||||
diags.AddError(errTitle, errContent)
|
||||
}
|
||||
|
|
@ -161,8 +171,10 @@ func LogResponse(ctx context.Context) context.Context {
|
|||
traceId := runtime.GetTraceId(ctx)
|
||||
ctx = tflog.SetField(ctx, "x-trace-id", traceId)
|
||||
|
||||
tflog.Info(ctx, "response data", map[string]interface{}{
|
||||
"x-trace-id": traceId,
|
||||
})
|
||||
tflog.Info(
|
||||
ctx, "response data", map[string]interface{}{
|
||||
"x-trace-id": traceId,
|
||||
},
|
||||
)
|
||||
return ctx
|
||||
}
|
||||
|
|
|
|||
|
|
@ -98,7 +98,7 @@ func (rrt *RetryRoundTripper) retryLoop(
|
|||
|
||||
waitDuration := rrt.calculateWaitDurationWithJitter(ctx, currentDelay)
|
||||
if err := rrt.waitForDelay(ctx, waitDuration); err != nil {
|
||||
return nil, err // Context was cancelled during wait.
|
||||
return nil, err // Context was canceled during wait.
|
||||
}
|
||||
|
||||
// Exponential backoff for the next potential retry.
|
||||
|
|
@ -153,7 +153,6 @@ func (rrt *RetryRoundTripper) handleFinalError(
|
|||
) error {
|
||||
if resp != nil {
|
||||
if err := resp.Body.Close(); err != nil {
|
||||
|
||||
tflog.Warn(
|
||||
ctx, "Failed to close response body", map[string]interface{}{
|
||||
"error": err.Error(),
|
||||
|
|
@ -194,7 +193,6 @@ func (rrt *RetryRoundTripper) shouldRetry(resp *http.Response, err error) bool {
|
|||
}
|
||||
|
||||
return false
|
||||
|
||||
}
|
||||
|
||||
// calculateWaitDurationWithJitter calculates the backoff duration for the next retry,
|
||||
|
|
@ -232,7 +230,7 @@ func (rrt *RetryRoundTripper) calculateWaitDurationWithJitter(
|
|||
func (rrt *RetryRoundTripper) waitForDelay(ctx context.Context, delay time.Duration) error {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return fmt.Errorf("context cancelled during backoff wait: %w", ctx.Err())
|
||||
return fmt.Errorf("context canceled during backoff wait: %w", ctx.Err())
|
||||
case <-time.After(delay):
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
|||
},
|
||||
}
|
||||
tripper := testRetryConfig(mock)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
||||
|
||||
resp, err := tripper.RoundTrip(req)
|
||||
if resp != nil {
|
||||
|
|
@ -110,7 +110,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
|||
},
|
||||
}
|
||||
tripper := testRetryConfig(mock)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
||||
|
||||
resp, err := tripper.RoundTrip(req)
|
||||
if resp != nil {
|
||||
|
|
@ -155,7 +155,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
|||
}, nil
|
||||
}
|
||||
tripper := testRetryConfig(mock)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
||||
|
||||
resp, err := tripper.RoundTrip(req)
|
||||
if resp != nil {
|
||||
|
|
@ -185,12 +185,12 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
|||
mockErr := errors.New("simulated network error")
|
||||
|
||||
mock := &mockRoundTripper{
|
||||
roundTripFunc: func(req *http.Request) (*http.Response, error) {
|
||||
roundTripFunc: func(_ *http.Request) (*http.Response, error) {
|
||||
return nil, mockErr
|
||||
},
|
||||
}
|
||||
tripper := testRetryConfig(mock)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody)
|
||||
|
||||
resp, err := tripper.RoundTrip(req)
|
||||
if resp != nil {
|
||||
|
|
@ -211,7 +211,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
|||
)
|
||||
|
||||
t.Run(
|
||||
"should abort retries if the main context is cancelled", func(t *testing.T) {
|
||||
"should abort retries if the main context is canceled", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
mock := &mockRoundTripper{
|
||||
|
|
@ -230,7 +230,7 @@ func TestRetryRoundTripper_RoundTrip(t *testing.T) {
|
|||
ctx, cancel := context.WithTimeout(baseCtx, 20*time.Millisecond)
|
||||
defer cancel()
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx)
|
||||
req := httptest.NewRequest(http.MethodGet, "/", http.NoBody).WithContext(ctx)
|
||||
|
||||
resp, err := tripper.RoundTrip(req)
|
||||
if resp != nil {
|
||||
|
|
|
|||
|
|
@ -33,15 +33,27 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
|||
},
|
||||
"connection_info": schema.SingleNestedAttribute{
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"host": schema.StringAttribute{
|
||||
"write": schema.SingleNestedAttribute{
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"host": schema.StringAttribute{
|
||||
Computed: true,
|
||||
Description: "The host of the instance.",
|
||||
MarkdownDescription: "The host of the instance.",
|
||||
},
|
||||
"port": schema.Int64Attribute{
|
||||
Computed: true,
|
||||
Description: "The port of the instance.",
|
||||
MarkdownDescription: "The port of the instance.",
|
||||
},
|
||||
},
|
||||
CustomType: WriteType{
|
||||
ObjectType: types.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
},
|
||||
},
|
||||
Computed: true,
|
||||
Description: "The host of the instance.",
|
||||
MarkdownDescription: "The host of the instance.",
|
||||
},
|
||||
"port": schema.Int64Attribute{
|
||||
Computed: true,
|
||||
Description: "The port of the instance.",
|
||||
MarkdownDescription: "The port of the instance.",
|
||||
Description: "The DNS name and port in the instance overview",
|
||||
MarkdownDescription: "The DNS name and port in the instance overview",
|
||||
},
|
||||
},
|
||||
CustomType: ConnectionInfoType{
|
||||
|
|
@ -50,8 +62,8 @@ func InstanceDataSourceSchema(ctx context.Context) schema.Schema {
|
|||
},
|
||||
},
|
||||
Computed: true,
|
||||
Description: "The DNS name and port in the instance overview",
|
||||
MarkdownDescription: "The DNS name and port in the instance overview",
|
||||
Description: "The connection information of the instance",
|
||||
MarkdownDescription: "The connection information of the instance",
|
||||
},
|
||||
"encryption": schema.SingleNestedAttribute{
|
||||
Attributes: map[string]schema.Attribute{
|
||||
|
|
@ -243,40 +255,22 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
|||
|
||||
attributes := in.Attributes()
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
writeAttribute, ok := attributes["write"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
`write is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
|
|
@ -284,8 +278,7 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
|||
}
|
||||
|
||||
return ConnectionInfoValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
Write: writeVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
|
@ -353,40 +346,22 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
|||
return NewConnectionInfoValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
writeAttribute, ok := attributes["write"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
`write is missing from object`)
|
||||
|
||||
return NewConnectionInfoValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return NewConnectionInfoValueUnknown(), diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
|
|
@ -394,8 +369,7 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
|||
}
|
||||
|
||||
return ConnectionInfoValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
Write: writeVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
|
@ -468,12 +442,401 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
|
|||
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
|
||||
|
||||
type ConnectionInfoValue struct {
|
||||
Write basetypes.ObjectValue `tfsdk:"write"`
|
||||
state attr.ValueState
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||
attrTypes := make(map[string]tftypes.Type, 1)
|
||||
|
||||
var val tftypes.Value
|
||||
var err error
|
||||
|
||||
attrTypes["write"] = basetypes.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
}.TerraformType(ctx)
|
||||
|
||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||
|
||||
switch v.state {
|
||||
case attr.ValueStateKnown:
|
||||
vals := make(map[string]tftypes.Value, 1)
|
||||
|
||||
val, err = v.Write.ToTerraformValue(ctx)
|
||||
|
||||
if err != nil {
|
||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
||||
}
|
||||
|
||||
vals["write"] = val
|
||||
|
||||
if err := tftypes.ValidateValue(objectType, vals); err != nil {
|
||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
||||
}
|
||||
|
||||
return tftypes.NewValue(objectType, vals), nil
|
||||
case attr.ValueStateNull:
|
||||
return tftypes.NewValue(objectType, nil), nil
|
||||
case attr.ValueStateUnknown:
|
||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
|
||||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsNull() bool {
|
||||
return v.state == attr.ValueStateNull
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsUnknown() bool {
|
||||
return v.state == attr.ValueStateUnknown
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) String() string {
|
||||
return "ConnectionInfoValue"
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
var write basetypes.ObjectValue
|
||||
|
||||
if v.Write.IsNull() {
|
||||
write = types.ObjectNull(
|
||||
WriteValue{}.AttributeTypes(ctx),
|
||||
)
|
||||
}
|
||||
|
||||
if v.Write.IsUnknown() {
|
||||
write = types.ObjectUnknown(
|
||||
WriteValue{}.AttributeTypes(ctx),
|
||||
)
|
||||
}
|
||||
|
||||
if !v.Write.IsNull() && !v.Write.IsUnknown() {
|
||||
write = types.ObjectValueMust(
|
||||
WriteValue{}.AttributeTypes(ctx),
|
||||
v.Write.Attributes(),
|
||||
)
|
||||
}
|
||||
|
||||
attributeTypes := map[string]attr.Type{
|
||||
"write": basetypes.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
|
||||
if v.IsNull() {
|
||||
return types.ObjectNull(attributeTypes), diags
|
||||
}
|
||||
|
||||
if v.IsUnknown() {
|
||||
return types.ObjectUnknown(attributeTypes), diags
|
||||
}
|
||||
|
||||
objVal, diags := types.ObjectValue(
|
||||
attributeTypes,
|
||||
map[string]attr.Value{
|
||||
"write": write,
|
||||
})
|
||||
|
||||
return objVal, diags
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
||||
other, ok := o.(ConnectionInfoValue)
|
||||
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
if v.state != other.state {
|
||||
return false
|
||||
}
|
||||
|
||||
if v.state != attr.ValueStateKnown {
|
||||
return true
|
||||
}
|
||||
|
||||
if !v.Write.Equal(other.Write) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
||||
return ConnectionInfoType{
|
||||
basetypes.ObjectType{
|
||||
AttrTypes: v.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||
return map[string]attr.Type{
|
||||
"write": basetypes.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
var _ basetypes.ObjectTypable = WriteType{}
|
||||
|
||||
type WriteType struct {
|
||||
basetypes.ObjectType
|
||||
}
|
||||
|
||||
func (t WriteType) Equal(o attr.Type) bool {
|
||||
other, ok := o.(WriteType)
|
||||
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return t.ObjectType.Equal(other.ObjectType)
|
||||
}
|
||||
|
||||
func (t WriteType) String() string {
|
||||
return "WriteType"
|
||||
}
|
||||
|
||||
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
attributes := in.Attributes()
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
return WriteValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
||||
func NewWriteValueNull() WriteValue {
|
||||
return WriteValue{
|
||||
state: attr.ValueStateNull,
|
||||
}
|
||||
}
|
||||
|
||||
func NewWriteValueUnknown() WriteValue {
|
||||
return WriteValue{
|
||||
state: attr.ValueStateUnknown,
|
||||
}
|
||||
}
|
||||
|
||||
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
|
||||
ctx := context.Background()
|
||||
|
||||
for name, attributeType := range attributeTypes {
|
||||
attribute, ok := attributes[name]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Missing WriteValue Attribute Value",
|
||||
"While creating a WriteValue value, a missing attribute value was detected. "+
|
||||
"A WriteValue must contain values for all attributes, even if null or unknown. "+
|
||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
|
||||
)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if !attributeType.Equal(attribute.Type(ctx)) {
|
||||
diags.AddError(
|
||||
"Invalid WriteValue Attribute Type",
|
||||
"While creating a WriteValue value, an invalid attribute value was detected. "+
|
||||
"A WriteValue must use a matching attribute type for the value. "+
|
||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
|
||||
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
for name := range attributes {
|
||||
_, ok := attributeTypes[name]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Extra WriteValue Attribute Value",
|
||||
"While creating a WriteValue value, an extra attribute value was detected. "+
|
||||
"A WriteValue must not contain values beyond the expected attribute types. "+
|
||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
||||
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
return WriteValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
||||
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
|
||||
object, diags := NewWriteValue(attributeTypes, attributes)
|
||||
|
||||
if diags.HasError() {
|
||||
// This could potentially be added to the diag package.
|
||||
diagsStrings := make([]string, 0, len(diags))
|
||||
|
||||
for _, diagnostic := range diags {
|
||||
diagsStrings = append(diagsStrings, fmt.Sprintf(
|
||||
"%s | %s | %s",
|
||||
diagnostic.Severity(),
|
||||
diagnostic.Summary(),
|
||||
diagnostic.Detail()))
|
||||
}
|
||||
|
||||
panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
|
||||
}
|
||||
|
||||
return object
|
||||
}
|
||||
|
||||
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
|
||||
if in.Type() == nil {
|
||||
return NewWriteValueNull(), nil
|
||||
}
|
||||
|
||||
if !in.Type().Equal(t.TerraformType(ctx)) {
|
||||
return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
|
||||
}
|
||||
|
||||
if !in.IsKnown() {
|
||||
return NewWriteValueUnknown(), nil
|
||||
}
|
||||
|
||||
if in.IsNull() {
|
||||
return NewWriteValueNull(), nil
|
||||
}
|
||||
|
||||
attributes := map[string]attr.Value{}
|
||||
|
||||
val := map[string]tftypes.Value{}
|
||||
|
||||
err := in.As(&val)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for k, v := range val {
|
||||
a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
attributes[k] = a
|
||||
}
|
||||
|
||||
return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
|
||||
}
|
||||
|
||||
func (t WriteType) ValueType(ctx context.Context) attr.Value {
|
||||
return WriteValue{}
|
||||
}
|
||||
|
||||
var _ basetypes.ObjectValuable = WriteValue{}
|
||||
|
||||
type WriteValue struct {
|
||||
Host basetypes.StringValue `tfsdk:"host"`
|
||||
Port basetypes.Int64Value `tfsdk:"port"`
|
||||
state attr.ValueState
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||
attrTypes := make(map[string]tftypes.Type, 2)
|
||||
|
||||
var val tftypes.Value
|
||||
|
|
@ -518,19 +881,19 @@ func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
|
|||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsNull() bool {
|
||||
func (v WriteValue) IsNull() bool {
|
||||
return v.state == attr.ValueStateNull
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsUnknown() bool {
|
||||
func (v WriteValue) IsUnknown() bool {
|
||||
return v.state == attr.ValueStateUnknown
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) String() string {
|
||||
return "ConnectionInfoValue"
|
||||
func (v WriteValue) String() string {
|
||||
return "WriteValue"
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
attributeTypes := map[string]attr.Type{
|
||||
|
|
@ -556,8 +919,8 @@ func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.Objec
|
|||
return objVal, diags
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
||||
other, ok := o.(ConnectionInfoValue)
|
||||
func (v WriteValue) Equal(o attr.Value) bool {
|
||||
other, ok := o.(WriteValue)
|
||||
|
||||
if !ok {
|
||||
return false
|
||||
|
|
@ -582,15 +945,15 @@ func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
|||
return true
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
||||
return ConnectionInfoType{
|
||||
func (v WriteValue) Type(ctx context.Context) attr.Type {
|
||||
return WriteType{
|
||||
basetypes.ObjectType{
|
||||
AttrTypes: v.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||
return map[string]attr.Type{
|
||||
"host": basetypes.StringType{},
|
||||
"port": basetypes.Int64Type{},
|
||||
|
|
|
|||
|
|
@ -33,9 +33,9 @@ func mapGetInstanceResponseToModel(
|
|||
)
|
||||
}
|
||||
|
||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil ||
|
||||
resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" ||
|
||||
resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0
|
||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
|
||||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
|
||||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
|
||||
|
||||
if isConnectionInfoIncomplete {
|
||||
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueNull()
|
||||
|
|
@ -43,22 +43,17 @@ func mapGetInstanceResponseToModel(
|
|||
m.ConnectionInfo = postgresflexalpharesource.NewConnectionInfoValueMust(
|
||||
postgresflexalpharesource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||
map[string]attr.Value{
|
||||
"host": types.StringPointerValue(resp.ConnectionInfo.Host),
|
||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Port),
|
||||
"write": postgresflexalpharesource.NewWriteValueMust(
|
||||
postgresflexalpharesource.WriteValue{}.AttributeTypes(ctx),
|
||||
map[string]attr.Value{
|
||||
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
|
||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
|
||||
},
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
m.ConnectionInfo.Host = types.StringValue("")
|
||||
if host, ok := resp.ConnectionInfo.GetHostOk(); ok {
|
||||
m.ConnectionInfo.Host = types.StringValue(host)
|
||||
}
|
||||
|
||||
m.ConnectionInfo.Port = types.Int64Value(0)
|
||||
if port, ok := resp.ConnectionInfo.GetPortOk(); ok {
|
||||
m.ConnectionInfo.Port = types.Int64Value(port)
|
||||
}
|
||||
|
||||
m.FlavorId = types.StringValue(resp.GetFlavorId())
|
||||
if m.Id.IsNull() || m.Id.IsUnknown() {
|
||||
m.Id = utils.BuildInternalTerraformId(
|
||||
|
|
@ -164,9 +159,9 @@ func mapGetDataInstanceResponseToModel(
|
|||
}
|
||||
|
||||
func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgresflex.GetInstanceResponse) {
|
||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil ||
|
||||
resp.ConnectionInfo.Host == nil || *resp.ConnectionInfo.Host == "" ||
|
||||
resp.ConnectionInfo.Port == nil || *resp.ConnectionInfo.Port == 0
|
||||
isConnectionInfoIncomplete := resp.ConnectionInfo == nil || resp.ConnectionInfo.Write == nil ||
|
||||
resp.ConnectionInfo.Write.Host == nil || *resp.ConnectionInfo.Write.Host == "" ||
|
||||
resp.ConnectionInfo.Write.Port == nil || *resp.ConnectionInfo.Write.Port == 0
|
||||
|
||||
if isConnectionInfoIncomplete {
|
||||
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueNull()
|
||||
|
|
@ -174,8 +169,13 @@ func handleConnectionInfo(ctx context.Context, m *dataSourceModel, resp *postgre
|
|||
m.ConnectionInfo = postgresflexalphadatasource.NewConnectionInfoValueMust(
|
||||
postgresflexalphadatasource.ConnectionInfoValue{}.AttributeTypes(ctx),
|
||||
map[string]attr.Value{
|
||||
"host": types.StringPointerValue(resp.ConnectionInfo.Host),
|
||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Port),
|
||||
"write": postgresflexalphadatasource.NewWriteValueMust(
|
||||
postgresflexalphadatasource.WriteValue{}.AttributeTypes(ctx),
|
||||
map[string]attr.Value{
|
||||
"host": types.StringPointerValue(resp.ConnectionInfo.Write.Host),
|
||||
"port": types.Int64PointerValue(resp.ConnectionInfo.Write.Port),
|
||||
},
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -328,10 +328,6 @@ func (r *instanceResource) Read(
|
|||
|
||||
ctx = core.InitProviderContext(ctx)
|
||||
|
||||
// projectId := model.ProjectId.ValueString()
|
||||
// region := r.providerData.GetRegionWithOverride(model.Region)
|
||||
// instanceId := model.InstanceId.ValueString()
|
||||
|
||||
var projectId string
|
||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||
projectId = model.ProjectId.ValueString()
|
||||
|
|
@ -435,18 +431,6 @@ func (r *instanceResource) Update(
|
|||
return
|
||||
}
|
||||
|
||||
// if model.InstanceId.IsNull() || model.InstanceId.IsUnknown() {
|
||||
// core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "instanceId is null or unknown")
|
||||
// return
|
||||
//}
|
||||
//
|
||||
// if model.ProjectId.IsNull() || model.ProjectId.IsUnknown() {
|
||||
// core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating instance", "projectId is null or unknown")
|
||||
// return
|
||||
//}
|
||||
|
||||
// projectId := model.ProjectId.ValueString()
|
||||
// instanceId := model.InstanceId.ValueString()
|
||||
projectId := identityData.ProjectID.ValueString()
|
||||
instanceId := identityData.InstanceID.ValueString()
|
||||
region := model.Region.ValueString()
|
||||
|
|
|
|||
|
|
@ -35,15 +35,27 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
|||
},
|
||||
"connection_info": schema.SingleNestedAttribute{
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"host": schema.StringAttribute{
|
||||
"write": schema.SingleNestedAttribute{
|
||||
Attributes: map[string]schema.Attribute{
|
||||
"host": schema.StringAttribute{
|
||||
Computed: true,
|
||||
Description: "The host of the instance.",
|
||||
MarkdownDescription: "The host of the instance.",
|
||||
},
|
||||
"port": schema.Int64Attribute{
|
||||
Computed: true,
|
||||
Description: "The port of the instance.",
|
||||
MarkdownDescription: "The port of the instance.",
|
||||
},
|
||||
},
|
||||
CustomType: WriteType{
|
||||
ObjectType: types.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
},
|
||||
},
|
||||
Computed: true,
|
||||
Description: "The host of the instance.",
|
||||
MarkdownDescription: "The host of the instance.",
|
||||
},
|
||||
"port": schema.Int64Attribute{
|
||||
Computed: true,
|
||||
Description: "The port of the instance.",
|
||||
MarkdownDescription: "The port of the instance.",
|
||||
Description: "The DNS name and port in the instance overview",
|
||||
MarkdownDescription: "The DNS name and port in the instance overview",
|
||||
},
|
||||
},
|
||||
CustomType: ConnectionInfoType{
|
||||
|
|
@ -52,8 +64,8 @@ func InstanceResourceSchema(ctx context.Context) schema.Schema {
|
|||
},
|
||||
},
|
||||
Computed: true,
|
||||
Description: "The DNS name and port in the instance overview",
|
||||
MarkdownDescription: "The DNS name and port in the instance overview",
|
||||
Description: "The connection information of the instance",
|
||||
MarkdownDescription: "The connection information of the instance",
|
||||
},
|
||||
"encryption": schema.SingleNestedAttribute{
|
||||
Attributes: map[string]schema.Attribute{
|
||||
|
|
@ -263,40 +275,22 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
|||
|
||||
attributes := in.Attributes()
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
writeAttribute, ok := attributes["write"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
`write is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
|
|
@ -304,8 +298,7 @@ func (t ConnectionInfoType) ValueFromObject(ctx context.Context, in basetypes.Ob
|
|||
}
|
||||
|
||||
return ConnectionInfoValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
Write: writeVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
|
@ -373,40 +366,22 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
|||
return NewConnectionInfoValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
writeAttribute, ok := attributes["write"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
`write is missing from object`)
|
||||
|
||||
return NewConnectionInfoValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
writeVal, ok := writeAttribute.(basetypes.ObjectValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return NewConnectionInfoValueUnknown(), diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
fmt.Sprintf(`write expected to be basetypes.ObjectValue, was: %T`, writeAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
|
|
@ -414,8 +389,7 @@ func NewConnectionInfoValue(attributeTypes map[string]attr.Type, attributes map[
|
|||
}
|
||||
|
||||
return ConnectionInfoValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
Write: writeVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
|
@ -488,12 +462,401 @@ func (t ConnectionInfoType) ValueType(ctx context.Context) attr.Value {
|
|||
var _ basetypes.ObjectValuable = ConnectionInfoValue{}
|
||||
|
||||
type ConnectionInfoValue struct {
|
||||
Write basetypes.ObjectValue `tfsdk:"write"`
|
||||
state attr.ValueState
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||
attrTypes := make(map[string]tftypes.Type, 1)
|
||||
|
||||
var val tftypes.Value
|
||||
var err error
|
||||
|
||||
attrTypes["write"] = basetypes.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
}.TerraformType(ctx)
|
||||
|
||||
objectType := tftypes.Object{AttributeTypes: attrTypes}
|
||||
|
||||
switch v.state {
|
||||
case attr.ValueStateKnown:
|
||||
vals := make(map[string]tftypes.Value, 1)
|
||||
|
||||
val, err = v.Write.ToTerraformValue(ctx)
|
||||
|
||||
if err != nil {
|
||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
||||
}
|
||||
|
||||
vals["write"] = val
|
||||
|
||||
if err := tftypes.ValidateValue(objectType, vals); err != nil {
|
||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), err
|
||||
}
|
||||
|
||||
return tftypes.NewValue(objectType, vals), nil
|
||||
case attr.ValueStateNull:
|
||||
return tftypes.NewValue(objectType, nil), nil
|
||||
case attr.ValueStateUnknown:
|
||||
return tftypes.NewValue(objectType, tftypes.UnknownValue), nil
|
||||
default:
|
||||
panic(fmt.Sprintf("unhandled Object state in ToTerraformValue: %s", v.state))
|
||||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsNull() bool {
|
||||
return v.state == attr.ValueStateNull
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsUnknown() bool {
|
||||
return v.state == attr.ValueStateUnknown
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) String() string {
|
||||
return "ConnectionInfoValue"
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
var write basetypes.ObjectValue
|
||||
|
||||
if v.Write.IsNull() {
|
||||
write = types.ObjectNull(
|
||||
WriteValue{}.AttributeTypes(ctx),
|
||||
)
|
||||
}
|
||||
|
||||
if v.Write.IsUnknown() {
|
||||
write = types.ObjectUnknown(
|
||||
WriteValue{}.AttributeTypes(ctx),
|
||||
)
|
||||
}
|
||||
|
||||
if !v.Write.IsNull() && !v.Write.IsUnknown() {
|
||||
write = types.ObjectValueMust(
|
||||
WriteValue{}.AttributeTypes(ctx),
|
||||
v.Write.Attributes(),
|
||||
)
|
||||
}
|
||||
|
||||
attributeTypes := map[string]attr.Type{
|
||||
"write": basetypes.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
|
||||
if v.IsNull() {
|
||||
return types.ObjectNull(attributeTypes), diags
|
||||
}
|
||||
|
||||
if v.IsUnknown() {
|
||||
return types.ObjectUnknown(attributeTypes), diags
|
||||
}
|
||||
|
||||
objVal, diags := types.ObjectValue(
|
||||
attributeTypes,
|
||||
map[string]attr.Value{
|
||||
"write": write,
|
||||
})
|
||||
|
||||
return objVal, diags
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
||||
other, ok := o.(ConnectionInfoValue)
|
||||
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
if v.state != other.state {
|
||||
return false
|
||||
}
|
||||
|
||||
if v.state != attr.ValueStateKnown {
|
||||
return true
|
||||
}
|
||||
|
||||
if !v.Write.Equal(other.Write) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
||||
return ConnectionInfoType{
|
||||
basetypes.ObjectType{
|
||||
AttrTypes: v.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||
return map[string]attr.Type{
|
||||
"write": basetypes.ObjectType{
|
||||
AttrTypes: WriteValue{}.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
var _ basetypes.ObjectTypable = WriteType{}
|
||||
|
||||
type WriteType struct {
|
||||
basetypes.ObjectType
|
||||
}
|
||||
|
||||
func (t WriteType) Equal(o attr.Type) bool {
|
||||
other, ok := o.(WriteType)
|
||||
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return t.ObjectType.Equal(other.ObjectType)
|
||||
}
|
||||
|
||||
func (t WriteType) String() string {
|
||||
return "WriteType"
|
||||
}
|
||||
|
||||
func (t WriteType) ValueFromObject(ctx context.Context, in basetypes.ObjectValue) (basetypes.ObjectValuable, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
attributes := in.Attributes()
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
return nil, diags
|
||||
}
|
||||
|
||||
return WriteValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
||||
func NewWriteValueNull() WriteValue {
|
||||
return WriteValue{
|
||||
state: attr.ValueStateNull,
|
||||
}
|
||||
}
|
||||
|
||||
func NewWriteValueUnknown() WriteValue {
|
||||
return WriteValue{
|
||||
state: attr.ValueStateUnknown,
|
||||
}
|
||||
}
|
||||
|
||||
func NewWriteValue(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) (WriteValue, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
// Reference: https://github.com/hashicorp/terraform-plugin-framework/issues/521
|
||||
ctx := context.Background()
|
||||
|
||||
for name, attributeType := range attributeTypes {
|
||||
attribute, ok := attributes[name]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Missing WriteValue Attribute Value",
|
||||
"While creating a WriteValue value, a missing attribute value was detected. "+
|
||||
"A WriteValue must contain values for all attributes, even if null or unknown. "+
|
||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s", name, attributeType.String()),
|
||||
)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if !attributeType.Equal(attribute.Type(ctx)) {
|
||||
diags.AddError(
|
||||
"Invalid WriteValue Attribute Type",
|
||||
"While creating a WriteValue value, an invalid attribute value was detected. "+
|
||||
"A WriteValue must use a matching attribute type for the value. "+
|
||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
||||
fmt.Sprintf("WriteValue Attribute Name (%s) Expected Type: %s\n", name, attributeType.String())+
|
||||
fmt.Sprintf("WriteValue Attribute Name (%s) Given Type: %s", name, attribute.Type(ctx)),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
for name := range attributes {
|
||||
_, ok := attributeTypes[name]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Extra WriteValue Attribute Value",
|
||||
"While creating a WriteValue value, an extra attribute value was detected. "+
|
||||
"A WriteValue must not contain values beyond the expected attribute types. "+
|
||||
"This is always an issue with the provider and should be reported to the provider developers.\n\n"+
|
||||
fmt.Sprintf("Extra WriteValue Attribute Name: %s", name),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostAttribute, ok := attributes["host"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`host is missing from object`)
|
||||
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
hostVal, ok := hostAttribute.(basetypes.StringValue)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`host expected to be basetypes.StringValue, was: %T`, hostAttribute))
|
||||
}
|
||||
|
||||
portAttribute, ok := attributes["port"]
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Missing",
|
||||
`port is missing from object`)
|
||||
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
portVal, ok := portAttribute.(basetypes.Int64Value)
|
||||
|
||||
if !ok {
|
||||
diags.AddError(
|
||||
"Attribute Wrong Type",
|
||||
fmt.Sprintf(`port expected to be basetypes.Int64Value, was: %T`, portAttribute))
|
||||
}
|
||||
|
||||
if diags.HasError() {
|
||||
return NewWriteValueUnknown(), diags
|
||||
}
|
||||
|
||||
return WriteValue{
|
||||
Host: hostVal,
|
||||
Port: portVal,
|
||||
state: attr.ValueStateKnown,
|
||||
}, diags
|
||||
}
|
||||
|
||||
func NewWriteValueMust(attributeTypes map[string]attr.Type, attributes map[string]attr.Value) WriteValue {
|
||||
object, diags := NewWriteValue(attributeTypes, attributes)
|
||||
|
||||
if diags.HasError() {
|
||||
// This could potentially be added to the diag package.
|
||||
diagsStrings := make([]string, 0, len(diags))
|
||||
|
||||
for _, diagnostic := range diags {
|
||||
diagsStrings = append(diagsStrings, fmt.Sprintf(
|
||||
"%s | %s | %s",
|
||||
diagnostic.Severity(),
|
||||
diagnostic.Summary(),
|
||||
diagnostic.Detail()))
|
||||
}
|
||||
|
||||
panic("NewWriteValueMust received error(s): " + strings.Join(diagsStrings, "\n"))
|
||||
}
|
||||
|
||||
return object
|
||||
}
|
||||
|
||||
func (t WriteType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) {
|
||||
if in.Type() == nil {
|
||||
return NewWriteValueNull(), nil
|
||||
}
|
||||
|
||||
if !in.Type().Equal(t.TerraformType(ctx)) {
|
||||
return nil, fmt.Errorf("expected %s, got %s", t.TerraformType(ctx), in.Type())
|
||||
}
|
||||
|
||||
if !in.IsKnown() {
|
||||
return NewWriteValueUnknown(), nil
|
||||
}
|
||||
|
||||
if in.IsNull() {
|
||||
return NewWriteValueNull(), nil
|
||||
}
|
||||
|
||||
attributes := map[string]attr.Value{}
|
||||
|
||||
val := map[string]tftypes.Value{}
|
||||
|
||||
err := in.As(&val)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for k, v := range val {
|
||||
a, err := t.AttrTypes[k].ValueFromTerraform(ctx, v)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
attributes[k] = a
|
||||
}
|
||||
|
||||
return NewWriteValueMust(WriteValue{}.AttributeTypes(ctx), attributes), nil
|
||||
}
|
||||
|
||||
func (t WriteType) ValueType(ctx context.Context) attr.Value {
|
||||
return WriteValue{}
|
||||
}
|
||||
|
||||
var _ basetypes.ObjectValuable = WriteValue{}
|
||||
|
||||
type WriteValue struct {
|
||||
Host basetypes.StringValue `tfsdk:"host"`
|
||||
Port basetypes.Int64Value `tfsdk:"port"`
|
||||
state attr.ValueState
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||
func (v WriteValue) ToTerraformValue(ctx context.Context) (tftypes.Value, error) {
|
||||
attrTypes := make(map[string]tftypes.Type, 2)
|
||||
|
||||
var val tftypes.Value
|
||||
|
|
@ -538,19 +901,19 @@ func (v ConnectionInfoValue) ToTerraformValue(ctx context.Context) (tftypes.Valu
|
|||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsNull() bool {
|
||||
func (v WriteValue) IsNull() bool {
|
||||
return v.state == attr.ValueStateNull
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) IsUnknown() bool {
|
||||
func (v WriteValue) IsUnknown() bool {
|
||||
return v.state == attr.ValueStateUnknown
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) String() string {
|
||||
return "ConnectionInfoValue"
|
||||
func (v WriteValue) String() string {
|
||||
return "WriteValue"
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||
func (v WriteValue) ToObjectValue(ctx context.Context) (basetypes.ObjectValue, diag.Diagnostics) {
|
||||
var diags diag.Diagnostics
|
||||
|
||||
attributeTypes := map[string]attr.Type{
|
||||
|
|
@ -576,8 +939,8 @@ func (v ConnectionInfoValue) ToObjectValue(ctx context.Context) (basetypes.Objec
|
|||
return objVal, diags
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
||||
other, ok := o.(ConnectionInfoValue)
|
||||
func (v WriteValue) Equal(o attr.Value) bool {
|
||||
other, ok := o.(WriteValue)
|
||||
|
||||
if !ok {
|
||||
return false
|
||||
|
|
@ -602,15 +965,15 @@ func (v ConnectionInfoValue) Equal(o attr.Value) bool {
|
|||
return true
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) Type(ctx context.Context) attr.Type {
|
||||
return ConnectionInfoType{
|
||||
func (v WriteValue) Type(ctx context.Context) attr.Type {
|
||||
return WriteType{
|
||||
basetypes.ObjectType{
|
||||
AttrTypes: v.AttributeTypes(ctx),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (v ConnectionInfoValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||
func (v WriteValue) AttributeTypes(ctx context.Context) map[string]attr.Type {
|
||||
return map[string]attr.Type{
|
||||
"host": basetypes.StringType{},
|
||||
"port": basetypes.Int64Type{},
|
||||
|
|
|
|||
|
|
@ -30,39 +30,46 @@ var testInstances []string
|
|||
|
||||
func init() {
|
||||
sweeperName := fmt.Sprintf("%s_%s", pfx, "sweeper")
|
||||
resource.AddTestSweepers(sweeperName, &resource.Sweeper{
|
||||
Name: sweeperName,
|
||||
F: func(region string) error {
|
||||
ctx := context.Background()
|
||||
apiClientConfigOptions := []config.ConfigurationOption{}
|
||||
apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
resource.AddTestSweepers(
|
||||
sweeperName, &resource.Sweeper{
|
||||
Name: sweeperName,
|
||||
F: func(_ string) error { // region is passed by the testing framework
|
||||
ctx := context.Background()
|
||||
apiClientConfigOptions := []config.ConfigurationOption{}
|
||||
apiClient, err := postgresflexalpha2.NewAPIClient(apiClientConfigOptions...)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region).
|
||||
Size(100).
|
||||
Execute()
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
instances, err := apiClient.ListInstancesRequest(ctx, testutils.ProjectId, testutils.Region).
|
||||
Size(100).
|
||||
Execute()
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
for _, inst := range instances.GetInstances() {
|
||||
if strings.HasPrefix(inst.GetName(), "tf-acc-") {
|
||||
for _, item := range testInstances {
|
||||
if inst.GetName() == item {
|
||||
delErr := apiClient.DeleteInstanceRequestExecute(ctx, testutils.ProjectId, testutils.Region, inst.GetId())
|
||||
if delErr != nil {
|
||||
// TODO: maybe just warn?
|
||||
log.Fatalln(delErr)
|
||||
for _, inst := range instances.GetInstances() {
|
||||
if strings.HasPrefix(inst.GetName(), "tf-acc-") {
|
||||
for _, item := range testInstances {
|
||||
if inst.GetName() == item {
|
||||
delErr := apiClient.DeleteInstanceRequestExecute(
|
||||
ctx,
|
||||
testutils.ProjectId,
|
||||
testutils.Region,
|
||||
inst.GetId(),
|
||||
)
|
||||
if delErr != nil {
|
||||
// TODO: maybe just warn?
|
||||
log.Fatalln(delErr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return nil
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestInstanceResourceSchema(t *testing.T) {
|
||||
|
|
@ -195,57 +202,67 @@ func TestAccInstance(t *testing.T) {
|
|||
updSizeData := exData
|
||||
updSizeData.Size = 25
|
||||
|
||||
resource.ParallelTest(t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
t.Logf(" ... working on instance %s", exData.TfName)
|
||||
testInstances = append(testInstances, exData.TfName)
|
||||
},
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
// Create and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
exData,
|
||||
),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", exData.Name),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"),
|
||||
),
|
||||
resource.ParallelTest(
|
||||
t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
t.Logf(" ... working on instance %s", exData.TfName)
|
||||
testInstances = append(testInstances, exData.TfName)
|
||||
},
|
||||
// Update name and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
updNameData,
|
||||
),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", exData.TfName), "name", updNameData.Name),
|
||||
),
|
||||
},
|
||||
// Update size and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
updSizeData,
|
||||
),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
||||
"storage.size",
|
||||
strconv.Itoa(int(updSizeData.Size)),
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
// Create and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
exData,
|
||||
),
|
||||
),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
||||
"name",
|
||||
exData.Name,
|
||||
),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", exData.TfName), "id"),
|
||||
),
|
||||
},
|
||||
// Update name and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
updNameData,
|
||||
),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
||||
"name",
|
||||
updNameData.Name,
|
||||
),
|
||||
),
|
||||
},
|
||||
// Update size and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
updSizeData,
|
||||
),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
testutils.ResStr(pfx, "instance", exData.TfName),
|
||||
"storage.size",
|
||||
strconv.Itoa(int(updSizeData.Size)),
|
||||
),
|
||||
),
|
||||
},
|
||||
//// Import test
|
||||
//{
|
||||
// ResourceName: "example_resource.test",
|
||||
// ImportState: true,
|
||||
// ImportStateVerify: true,
|
||||
// },
|
||||
},
|
||||
//// Import test
|
||||
//{
|
||||
// ResourceName: "example_resource.test",
|
||||
// ImportState: true,
|
||||
// ImportStateVerify: true,
|
||||
// },
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccInstanceWithUsers(t *testing.T) {
|
||||
|
|
@ -260,29 +277,35 @@ func TestAccInstanceWithUsers(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
resource.ParallelTest(t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
t.Logf(" ... working on instance %s", data.TfName)
|
||||
testInstances = append(testInstances, data.TfName)
|
||||
},
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
// Create and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
data,
|
||||
),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||
),
|
||||
resource.ParallelTest(
|
||||
t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
t.Logf(" ... working on instance %s", data.TfName)
|
||||
testInstances = append(testInstances, data.TfName)
|
||||
},
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
// Create and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
data,
|
||||
),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
testutils.ResStr(pfx, "instance", data.TfName),
|
||||
"name",
|
||||
data.Name,
|
||||
),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccInstanceWithDatabases(t *testing.T) {
|
||||
|
|
@ -306,32 +329,38 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
|||
},
|
||||
}
|
||||
|
||||
resource.ParallelTest(t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
t.Logf(" ... working on instance %s", data.TfName)
|
||||
testInstances = append(testInstances, data.TfName)
|
||||
},
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
// Create and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
data,
|
||||
),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "instance", data.TfName), "name", data.Name),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"),
|
||||
),
|
||||
resource.ParallelTest(
|
||||
t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
t.Logf(" ... working on instance %s", data.TfName)
|
||||
testInstances = append(testInstances, data.TfName)
|
||||
},
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
// Create and verify
|
||||
{
|
||||
Config: testutils.StringFromTemplateMust(
|
||||
"testdata/instance_template.gompl",
|
||||
data,
|
||||
),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
testutils.ResStr(pfx, "instance", data.TfName),
|
||||
"name",
|
||||
data.Name,
|
||||
),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "instance", data.TfName), "id"),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "user", userName), "name", userName),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "user", userName), "id"),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "name", dbName),
|
||||
resource.TestCheckResourceAttr(testutils.ResStr(pfx, "database", dbName), "owner", userName),
|
||||
resource.TestCheckResourceAttrSet(testutils.ResStr(pfx, "database", dbName), "id"),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// func setupMockServer() *httptest.Server {
|
||||
|
|
@ -461,7 +490,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
|||
// "project_id": testutils.ProjectId,
|
||||
//}
|
||||
//
|
||||
//func configResources(backupSchedule string, _ *string) string {
|
||||
// func configResources(backupSchedule string, _ *string) string {
|
||||
// return fmt.Sprintf(
|
||||
// `
|
||||
// %s
|
||||
|
|
@ -535,7 +564,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
|||
// )
|
||||
//}
|
||||
//
|
||||
//func TestAccPostgresFlexFlexResource(t *testing.T) {
|
||||
// func TestAccPostgresFlexFlexResource(t *testing.T) {
|
||||
// resource.ParallelTest(
|
||||
// t, resource.TestCase{
|
||||
// ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
|
|
@ -954,7 +983,7 @@ func TestAccInstanceWithDatabases(t *testing.T) {
|
|||
// )
|
||||
//}
|
||||
//
|
||||
//func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
|
||||
// func testAccCheckPostgresFlexDestroy(s *terraform.State) error {
|
||||
// ctx := context.Background()
|
||||
// var client *postgresflex.APIClient
|
||||
// var err error
|
||||
|
|
|
|||
|
|
@ -252,7 +252,6 @@ func (r *userResource) Create(
|
|||
model.UserId = types.Int64Value(id)
|
||||
model.Password = types.StringValue(userResp.GetPassword())
|
||||
model.Status = types.StringValue(userResp.GetStatus())
|
||||
//model.ConnectionString = types.StringValue(userResp.GetConnectionString())
|
||||
|
||||
waitResp, err := postgresflexalphaWait.GetUserByIdWaitHandler(
|
||||
ctx,
|
||||
|
|
|
|||
|
|
@ -143,7 +143,6 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
|
|||
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
||||
|
||||
tflog.Info(ctx, "SQL Server Flex beta database read")
|
||||
|
||||
}
|
||||
|
||||
// handleReadError centralizes API error handling for the Read operation.
|
||||
|
|
|
|||
|
|
@ -36,10 +36,6 @@ var (
|
|||
|
||||
// Define errors
|
||||
errDatabaseNotFound = errors.New("database not found")
|
||||
|
||||
// Error message constants
|
||||
extractErrorSummary = "extracting failed"
|
||||
extractErrorMessage = "Extracting identity data: %v"
|
||||
)
|
||||
|
||||
func NewDatabaseResource() resource.Resource {
|
||||
|
|
@ -186,26 +182,6 @@ func (r *databaseResource) Create(ctx context.Context, req resource.CreateReques
|
|||
payLoad.Name = data.Name.ValueStringPointer()
|
||||
payLoad.Owner = data.Owner.ValueStringPointer()
|
||||
|
||||
//_, err := wait.WaitForUserWaitHandler(
|
||||
// ctx,
|
||||
// r.client,
|
||||
// projectId,
|
||||
// instanceId,
|
||||
// region,
|
||||
// data.Owner.ValueString(),
|
||||
//).
|
||||
// SetSleepBeforeWait(10 * time.Second).
|
||||
// WaitWithContext(ctx)
|
||||
//if err != nil {
|
||||
// core.LogAndAddError(
|
||||
// ctx,
|
||||
// &resp.Diagnostics,
|
||||
// createErr,
|
||||
// fmt.Sprintf("Calling API: %v", err),
|
||||
// )
|
||||
// return
|
||||
//}
|
||||
|
||||
createResp, err := r.client.CreateDatabaseRequest(ctx, projectId, region, instanceId).
|
||||
CreateDatabaseRequestPayload(payLoad).
|
||||
Execute()
|
||||
|
|
@ -451,7 +427,9 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques
|
|||
&resp.Diagnostics,
|
||||
"Error deleting database",
|
||||
fmt.Sprintf(
|
||||
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId))
|
||||
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
|
||||
),
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -468,7 +446,6 @@ func (r *databaseResource) ModifyPlan(
|
|||
req resource.ModifyPlanRequest,
|
||||
resp *resource.ModifyPlanResponse,
|
||||
) { // nolint:gocritic // function signature required by Terraform
|
||||
|
||||
// skip initial empty configuration to avoid follow-up errors
|
||||
if req.Config.Raw.IsNull() {
|
||||
return
|
||||
|
|
|
|||
|
|
@ -247,7 +247,6 @@ func toCreatePayload(
|
|||
conversion.StringValueToPointer(model.Version),
|
||||
),
|
||||
}, nil
|
||||
|
||||
}
|
||||
|
||||
func toUpdatePayload(
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ type InstanceResourceIdentityModel struct {
|
|||
}
|
||||
|
||||
func (r *instanceResource) Metadata(
|
||||
ctx context.Context,
|
||||
_ context.Context,
|
||||
req resource.MetadataRequest,
|
||||
resp *resource.MetadataResponse,
|
||||
) {
|
||||
|
|
@ -64,7 +64,7 @@ func (r *instanceResource) Metadata(
|
|||
//go:embed planModifiers.yaml
|
||||
var modifiersFileByte []byte
|
||||
|
||||
func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
s := sqlserverflexalphaResGen.InstanceResourceSchema(ctx)
|
||||
|
||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||
|
|
@ -147,7 +147,6 @@ func (r *instanceResource) ModifyPlan(
|
|||
req resource.ModifyPlanRequest,
|
||||
resp *resource.ModifyPlanResponse,
|
||||
) { // nolint:gocritic // function signature required by Terraform
|
||||
|
||||
// skip initial empty configuration to avoid follow-up errors
|
||||
if req.Config.Raw.IsNull() {
|
||||
return
|
||||
|
|
|
|||
|
|
@ -288,8 +288,8 @@ func TestAccInstanceNoEncryption(t *testing.T) {
|
|||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||
|
|
@ -389,8 +389,8 @@ func TestAccInstanceEncryption(t *testing.T) {
|
|||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||
|
|
|
|||
|
|
@ -20,8 +20,6 @@ import (
|
|||
|
||||
var _ datasource.DataSource = (*userDataSource)(nil)
|
||||
|
||||
const errorPrefix = "[sqlserverflexalpha - User]"
|
||||
|
||||
func NewUserDataSource() datasource.DataSource {
|
||||
return &userDataSource{}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ import (
|
|||
|
||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexalpha"
|
||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||
sqlserverflexalphagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/user/resources_gen"
|
||||
sqlserverflexalphaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexalpha/utils"
|
||||
sqlserverflexalphaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexalpha"
|
||||
|
||||
|
|
@ -59,7 +58,7 @@ type userResource struct {
|
|||
providerData core.ProviderData
|
||||
}
|
||||
|
||||
func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||
func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||
resp.TypeName = req.ProviderTypeName + "_sqlserverflexalpha_user"
|
||||
}
|
||||
|
||||
|
|
@ -118,7 +117,7 @@ var modifiersFileByte []byte
|
|||
|
||||
// Schema defines the schema for the resource.
|
||||
func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
s := sqlserverflexalphagen.UserResourceSchema(ctx)
|
||||
s := sqlserverflexalphaResGen.UserResourceSchema(ctx)
|
||||
|
||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||
if err != nil {
|
||||
|
|
@ -470,7 +469,6 @@ func (r *userResource) Delete(
|
|||
// Delete existing record set
|
||||
_, err = sqlserverflexalphaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
|
||||
WaitWithContext(ctx)
|
||||
// err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
|
||||
if err != nil {
|
||||
core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
|
||||
return
|
||||
|
|
|
|||
|
|
@ -143,7 +143,6 @@ func (d *databaseDataSource) Read(ctx context.Context, req datasource.ReadReques
|
|||
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
|
||||
|
||||
tflog.Info(ctx, "SQL Server Flex beta database read")
|
||||
|
||||
}
|
||||
|
||||
// handleReadError centralizes API error handling for the Read operation.
|
||||
|
|
|
|||
|
|
@ -36,10 +36,6 @@ var (
|
|||
|
||||
// Define errors
|
||||
errDatabaseNotFound = errors.New("database not found")
|
||||
|
||||
// Error message constants
|
||||
extractErrorSummary = "extracting failed"
|
||||
extractErrorMessage = "Extracting identity data: %v"
|
||||
)
|
||||
|
||||
func NewDatabaseResource() resource.Resource {
|
||||
|
|
@ -430,7 +426,9 @@ func (r *databaseResource) Delete(ctx context.Context, req resource.DeleteReques
|
|||
&resp.Diagnostics,
|
||||
"Error deleting database",
|
||||
fmt.Sprintf(
|
||||
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId))
|
||||
"Calling API: %v\nname: %s, region: %s, instanceId: %s", err, databaseName, region, instanceId,
|
||||
),
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -449,7 +447,6 @@ func (r *databaseResource) ModifyPlan(
|
|||
req resource.ModifyPlanRequest,
|
||||
resp *resource.ModifyPlanResponse,
|
||||
) { // nolint:gocritic // function signature required by Terraform
|
||||
|
||||
// skip initial empty configuration to avoid follow-up errors
|
||||
if req.Config.Raw.IsNull() {
|
||||
return
|
||||
|
|
@ -559,45 +556,4 @@ func (r *databaseResource) ImportState(
|
|||
tflog.Info(ctx, "Sqlserverflexbeta database state imported")
|
||||
}
|
||||
|
||||
// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity model.
|
||||
func (r *databaseResource) extractIdentityData(
|
||||
model resourceModel,
|
||||
identity DatabaseResourceIdentityModel,
|
||||
) (projectId, region, instanceId, databaseName string, err error) {
|
||||
if !model.Name.IsNull() && !model.Name.IsUnknown() {
|
||||
databaseName = model.Name.ValueString()
|
||||
} else {
|
||||
if identity.DatabaseName.IsNull() || identity.DatabaseName.IsUnknown() {
|
||||
return "", "", "", "", fmt.Errorf("database_name not found in config")
|
||||
}
|
||||
databaseName = identity.DatabaseName.ValueString()
|
||||
}
|
||||
|
||||
if !model.ProjectId.IsNull() && !model.ProjectId.IsUnknown() {
|
||||
projectId = model.ProjectId.ValueString()
|
||||
} else {
|
||||
if identity.ProjectID.IsNull() || identity.ProjectID.IsUnknown() {
|
||||
return "", "", "", "", fmt.Errorf("project_id not found in config")
|
||||
}
|
||||
projectId = identity.ProjectID.ValueString()
|
||||
}
|
||||
|
||||
if !model.Region.IsNull() && !model.Region.IsUnknown() {
|
||||
region = r.providerData.GetRegionWithOverride(model.Region)
|
||||
} else {
|
||||
if identity.Region.IsNull() || identity.Region.IsUnknown() {
|
||||
return "", "", "", "", fmt.Errorf("region not found in config")
|
||||
}
|
||||
region = r.providerData.GetRegionWithOverride(identity.Region)
|
||||
}
|
||||
|
||||
if !model.InstanceId.IsNull() && !model.InstanceId.IsUnknown() {
|
||||
instanceId = model.InstanceId.ValueString()
|
||||
} else {
|
||||
if identity.InstanceID.IsNull() || identity.InstanceID.IsUnknown() {
|
||||
return "", "", "", "", fmt.Errorf("instance_id not found in config")
|
||||
}
|
||||
instanceId = identity.InstanceID.ValueString()
|
||||
}
|
||||
return projectId, region, instanceId, databaseName, nil
|
||||
}
|
||||
// extractIdentityData extracts essential identifiers from the resource model, falling back to the identity mode
|
||||
|
|
|
|||
|
|
@ -236,7 +236,6 @@ func toCreatePayload(
|
|||
conversion.StringValueToPointer(model.Version),
|
||||
),
|
||||
}, nil
|
||||
|
||||
}
|
||||
|
||||
func toUpdatePayload(
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import (
|
|||
"github.com/hashicorp/terraform-plugin-framework/resource"
|
||||
"github.com/hashicorp/terraform-plugin-framework/types"
|
||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||
|
||||
sqlserverflexbetaPkgGen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
|
||||
sqlserverflexbetaRs "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/instance/resources_gen"
|
||||
)
|
||||
|
|
@ -28,11 +29,13 @@ func Test_handleDSEncryption(t *testing.T) {
|
|||
// TODO: Add test cases.
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := handleDSEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
t.Run(
|
||||
tt.name, func(t *testing.T) {
|
||||
if got := handleDSEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("handleDSEncryption() = %v, want %v", got, tt.want)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -86,11 +89,13 @@ func Test_handleEncryption(t *testing.T) {
|
|||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := handleEncryption(tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("handleEncryption() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
t.Run(
|
||||
tt.name, func(t *testing.T) {
|
||||
if got := handleEncryption(t.Context(), tt.args.m, tt.args.resp); !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("handleEncryption() = %v, want %v", got, tt.want)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -109,11 +114,18 @@ func Test_mapDataResponseToModel(t *testing.T) {
|
|||
// TODO: Add test cases.
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if err := mapDataResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr {
|
||||
t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
})
|
||||
t.Run(
|
||||
tt.name, func(t *testing.T) {
|
||||
if err := mapDataResponseToModel(
|
||||
tt.args.ctx,
|
||||
tt.args.resp,
|
||||
tt.args.m,
|
||||
tt.args.tfDiags,
|
||||
); (err != nil) != tt.wantErr {
|
||||
t.Errorf("mapDataResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -132,11 +144,18 @@ func Test_mapResponseToModel(t *testing.T) {
|
|||
// TODO: Add test cases.
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if err := mapResponseToModel(tt.args.ctx, tt.args.resp, tt.args.m, tt.args.tfDiags); (err != nil) != tt.wantErr {
|
||||
t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
})
|
||||
t.Run(
|
||||
tt.name, func(t *testing.T) {
|
||||
if err := mapResponseToModel(
|
||||
tt.args.ctx,
|
||||
tt.args.resp,
|
||||
tt.args.m,
|
||||
tt.args.tfDiags,
|
||||
); (err != nil) != tt.wantErr {
|
||||
t.Errorf("mapResponseToModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -208,19 +227,18 @@ func Test_toCreatePayload(t *testing.T) {
|
|||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := toCreatePayload(tt.args.ctx, tt.args.model)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if diff := cmp.Diff(tt.want, got); diff != "" {
|
||||
t.Errorf("model mismatch (-want +got):\n%s", diff)
|
||||
}
|
||||
//if !reflect.DeepEqual(got, tt.want) {
|
||||
// t.Errorf("toCreatePayload() got = %v, want %v", got, tt.want)
|
||||
//}
|
||||
})
|
||||
t.Run(
|
||||
tt.name, func(t *testing.T) {
|
||||
got, err := toCreatePayload(tt.args.ctx, tt.args.model)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("toCreatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if diff := cmp.Diff(tt.want, got); diff != "" {
|
||||
t.Errorf("model mismatch (-want +got):\n%s", diff)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -239,15 +257,17 @@ func Test_toUpdatePayload(t *testing.T) {
|
|||
// TODO: Add test cases.
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
t.Run(
|
||||
tt.name, func(t *testing.T) {
|
||||
got, err := toUpdatePayload(tt.args.ctx, tt.args.m, tt.args.resp)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("toUpdatePayload() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("toUpdatePayload() got = %v, want %v", got, tt.want)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ type InstanceResourceIdentityModel struct {
|
|||
}
|
||||
|
||||
func (r *instanceResource) Metadata(
|
||||
ctx context.Context,
|
||||
_ context.Context,
|
||||
req resource.MetadataRequest,
|
||||
resp *resource.MetadataResponse,
|
||||
) {
|
||||
|
|
@ -64,7 +64,7 @@ func (r *instanceResource) Metadata(
|
|||
//go:embed planModifiers.yaml
|
||||
var modifiersFileByte []byte
|
||||
|
||||
func (r *instanceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
func (r *instanceResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
s := sqlserverflexbetaResGen.InstanceResourceSchema(ctx)
|
||||
|
||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||
|
|
@ -147,7 +147,6 @@ func (r *instanceResource) ModifyPlan(
|
|||
req resource.ModifyPlanRequest,
|
||||
resp *resource.ModifyPlanResponse,
|
||||
) { // nolint:gocritic // function signature required by Terraform
|
||||
|
||||
// skip initial empty configuration to avoid follow-up errors
|
||||
if req.Config.Raw.IsNull() {
|
||||
return
|
||||
|
|
|
|||
|
|
@ -349,8 +349,8 @@ func TestAccInstanceNoEncryption(t *testing.T) {
|
|||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||
|
|
@ -450,8 +450,8 @@ func TestAccInstanceEncryption(t *testing.T) {
|
|||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_version"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
//resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.kek_key_ring_id"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "encryption.service_account"),
|
||||
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.access_scope"),
|
||||
// resource.TestCheckResourceAttrSet(resName("instance", data.TfName), "network.acl"),
|
||||
|
|
|
|||
|
|
@ -20,8 +20,6 @@ import (
|
|||
|
||||
var _ datasource.DataSource = (*userDataSource)(nil)
|
||||
|
||||
const errorPrefix = "[Sqlserverflexbeta - User]"
|
||||
|
||||
func NewUserDataSource() datasource.DataSource {
|
||||
return &userDataSource{}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ import (
|
|||
|
||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/sqlserverflexbeta"
|
||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/conversion"
|
||||
sqlserverflexbetagen "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/user/resources_gen"
|
||||
sqlserverflexbetaUtils "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/sqlserverflexbeta/utils"
|
||||
sqlserverflexbetaWait "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/wait/sqlserverflexbeta"
|
||||
|
||||
|
|
@ -59,7 +58,7 @@ type userResource struct {
|
|||
providerData core.ProviderData
|
||||
}
|
||||
|
||||
func (r *userResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||
func (r *userResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
|
||||
resp.TypeName = req.ProviderTypeName + "_sqlserverflexbeta_user"
|
||||
}
|
||||
|
||||
|
|
@ -108,23 +107,23 @@ func (r *userResource) ModifyPlan(
|
|||
}
|
||||
|
||||
//// TODO: verify if this is needed - START
|
||||
//var planRoles []string
|
||||
//diags := planModel.Roles.ElementsAs(ctx, &planRoles, false)
|
||||
//resp.Diagnostics.Append(diags...)
|
||||
//if diags.HasError() {
|
||||
// var planRoles []string
|
||||
// diags := planModel.Roles.ElementsAs(ctx, &planRoles, false)
|
||||
// resp.Diagnostics.Append(diags...)
|
||||
// if diags.HasError() {
|
||||
// return
|
||||
//}
|
||||
//slices.Sort(planRoles)
|
||||
//var roles []attr.Value
|
||||
//for _, role := range planRoles {
|
||||
// slices.Sort(planRoles)
|
||||
// var roles []attr.Value
|
||||
// for _, role := range planRoles {
|
||||
// roles = append(roles, types.StringValue(string(role)))
|
||||
//}
|
||||
//rolesSet, diags := types.ListValue(types.StringType, roles)
|
||||
//resp.Diagnostics.Append(diags...)
|
||||
//if diags.HasError() {
|
||||
// rolesSet, diags := types.ListValue(types.StringType, roles)
|
||||
// resp.Diagnostics.Append(diags...)
|
||||
// if diags.HasError() {
|
||||
// return
|
||||
//}
|
||||
//planModel.Roles = rolesSet
|
||||
// planModel.Roles = rolesSet
|
||||
//// TODO: verify if this is needed - END
|
||||
|
||||
resp.Diagnostics.Append(resp.Plan.Set(ctx, planModel)...)
|
||||
|
|
@ -138,7 +137,7 @@ var modifiersFileByte []byte
|
|||
|
||||
// Schema defines the schema for the resource.
|
||||
func (r *userResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
|
||||
s := sqlserverflexbetagen.UserResourceSchema(ctx)
|
||||
s := sqlserverflexbetaResGen.UserResourceSchema(ctx)
|
||||
|
||||
fields, err := utils.ReadModifiersConfig(modifiersFileByte)
|
||||
if err != nil {
|
||||
|
|
@ -435,7 +434,12 @@ func (r *userResource) Update(
|
|||
resp *resource.UpdateResponse,
|
||||
) { // nolint:gocritic // function signature required by Terraform
|
||||
// Update shouldn't be called
|
||||
core.LogAndAddError(ctx, &resp.Diagnostics, "Error updating user", "an SQL server user can not be updated, only created")
|
||||
core.LogAndAddError(
|
||||
ctx,
|
||||
&resp.Diagnostics,
|
||||
"Error updating user",
|
||||
"an SQL server user can not be updated, only created",
|
||||
)
|
||||
}
|
||||
|
||||
// Delete deletes the resource and removes the Terraform state on success.
|
||||
|
|
@ -489,7 +493,6 @@ func (r *userResource) Delete(
|
|||
// Delete existing record set
|
||||
_, err = sqlserverflexbetaWait.DeleteUserWaitHandler(ctx, r.client, projectId, region, instanceId, userId).
|
||||
WaitWithContext(ctx)
|
||||
// err := r.client.DeleteUserRequest(ctx, arg.projectId, arg.region, arg.instanceId, userId).Execute()
|
||||
if err != nil {
|
||||
core.LogAndAddError(ctx, &resp.Diagnostics, "User Delete Error", fmt.Sprintf("Calling API: %v", err))
|
||||
return
|
||||
|
|
|
|||
|
|
@ -108,7 +108,7 @@ func CreateInstanceWaitHandler(
|
|||
)
|
||||
if extendedTimeout < 3 {
|
||||
maxWait += time.Minute * 5
|
||||
extendedTimeout = extendedTimeout + 1
|
||||
extendedTimeout++
|
||||
if *s.Network.AccessScope == "SNA" {
|
||||
ready := true
|
||||
if s.Network == nil || s.Network.InstanceAddress == nil {
|
||||
|
|
@ -228,7 +228,7 @@ func GetUserByIdWaitHandler(
|
|||
if userId > math.MaxInt32 {
|
||||
return false, nil, fmt.Errorf("userId value is too big for int32")
|
||||
}
|
||||
userId32 := int32(userId)
|
||||
userId32 := int32(userId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32
|
||||
s, err := a.GetUserRequestExecute(ctx, projectId, region, instanceId, userId32)
|
||||
if err != nil {
|
||||
var oapiErr *oapierror.GenericOpenAPIError
|
||||
|
|
@ -239,9 +239,11 @@ func GetUserByIdWaitHandler(
|
|||
switch oapiErr.StatusCode {
|
||||
case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
|
||||
case http.StatusNotFound:
|
||||
tflog.Warn(ctx, "api responded with status", map[string]interface{}{
|
||||
"status": oapiErr.StatusCode,
|
||||
})
|
||||
tflog.Warn(
|
||||
ctx, "api responded with status", map[string]interface{}{
|
||||
"status": oapiErr.StatusCode,
|
||||
},
|
||||
)
|
||||
return false, nil, nil
|
||||
default:
|
||||
return false, nil, err
|
||||
|
|
@ -262,7 +264,7 @@ func GetDatabaseByIdWaitHandler(
|
|||
) *wait.AsyncActionHandler[postgresflex.GetDatabaseResponse] {
|
||||
handler := wait.New(
|
||||
func() (waitFinished bool, response *postgresflex.GetDatabaseResponse, err error) {
|
||||
dbId32 := int32(databaseId)
|
||||
dbId32 := int32(databaseId) //nolint:gosec // we need to convert databaseId to int32 because API expects int32
|
||||
s, err := a.GetDatabaseRequestExecute(ctx, projectId, region, instanceId, dbId32)
|
||||
if err != nil {
|
||||
var oapiErr *oapierror.GenericOpenAPIError
|
||||
|
|
@ -272,14 +274,18 @@ func GetDatabaseByIdWaitHandler(
|
|||
}
|
||||
switch oapiErr.StatusCode {
|
||||
case http.StatusBadGateway, http.StatusGatewayTimeout, http.StatusServiceUnavailable:
|
||||
tflog.Warn(ctx, "api responded with 50[2,3,4] status", map[string]interface{}{
|
||||
"status": oapiErr.StatusCode,
|
||||
})
|
||||
tflog.Warn(
|
||||
ctx, "api responded with 50[2,3,4] status", map[string]interface{}{
|
||||
"status": oapiErr.StatusCode,
|
||||
},
|
||||
)
|
||||
return false, nil, nil
|
||||
case http.StatusNotFound:
|
||||
tflog.Warn(ctx, "api responded with 404 status", map[string]interface{}{
|
||||
"status": oapiErr.StatusCode,
|
||||
})
|
||||
tflog.Warn(
|
||||
ctx, "api responded with 404 status", map[string]interface{}{
|
||||
"status": oapiErr.StatusCode,
|
||||
},
|
||||
)
|
||||
return false, nil, nil
|
||||
default:
|
||||
return false, nil, err
|
||||
|
|
|
|||
|
|
@ -54,7 +54,12 @@ type APIClientInterface interface {
|
|||
instanceId string,
|
||||
) (*sqlserverflex.ListRolesResponse, error)
|
||||
|
||||
ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest
|
||||
ListUsersRequest(
|
||||
ctx context.Context,
|
||||
projectId string,
|
||||
region string,
|
||||
instanceId string,
|
||||
) sqlserverflex.ApiListUsersRequestRequest
|
||||
|
||||
ListUsersRequestExecute(
|
||||
ctx context.Context,
|
||||
|
|
@ -256,7 +261,10 @@ func CreateDatabaseWaitHandler(
|
|||
var oapiErr *oapierror.GenericOpenAPIError
|
||||
ok := errors.As(err, &oapiErr)
|
||||
if !ok {
|
||||
return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||
return false, nil, fmt.Errorf(
|
||||
"get database - could not convert error to oapierror.GenericOpenAPIError: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
if oapiErr.StatusCode != http.StatusNotFound {
|
||||
return false, nil, err
|
||||
|
|
@ -318,7 +326,10 @@ func WaitForUserWaitHandler(
|
|||
var oapiErr *oapierror.GenericOpenAPIError
|
||||
ok := errors.As(err, &oapiErr)
|
||||
if !ok {
|
||||
return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||
return false, nil, fmt.Errorf(
|
||||
"wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
if oapiErr.StatusCode != http.StatusNotFound {
|
||||
return false, nil, err
|
||||
|
|
|
|||
|
|
@ -116,7 +116,6 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
|
|||
}, nil
|
||||
}
|
||||
func TestCreateInstanceWaitHandler(t *testing.T) {
|
||||
//stateSuccess := utils.Ptr(InstanceStateSuccess)
|
||||
instanceId := utils.Ptr("foo")
|
||||
tests := []struct {
|
||||
desc string
|
||||
|
|
@ -160,7 +159,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
|
|||
// Storage: nil,
|
||||
// Version: nil,
|
||||
// },
|
||||
//},
|
||||
// },
|
||||
{
|
||||
desc: "create_failed",
|
||||
instanceId: *instanceId,
|
||||
|
|
|
|||
|
|
@ -54,7 +54,12 @@ type APIClientInterface interface {
|
|||
instanceId string,
|
||||
) (*sqlserverflex.ListRolesResponse, error)
|
||||
|
||||
ListUsersRequest(ctx context.Context, projectId string, region string, instanceId string) sqlserverflex.ApiListUsersRequestRequest
|
||||
ListUsersRequest(
|
||||
ctx context.Context,
|
||||
projectId string,
|
||||
region string,
|
||||
instanceId string,
|
||||
) sqlserverflex.ApiListUsersRequestRequest
|
||||
|
||||
ListUsersRequestExecute(
|
||||
ctx context.Context,
|
||||
|
|
@ -162,9 +167,17 @@ func CreateInstanceWaitHandler(
|
|||
}
|
||||
return true, s, nil
|
||||
case strings.ToLower(InstanceStateUnknown):
|
||||
return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateUnknown)
|
||||
return true, nil, fmt.Errorf(
|
||||
"create failed for instance %s with status %s",
|
||||
instanceId,
|
||||
InstanceStateUnknown,
|
||||
)
|
||||
case strings.ToLower(InstanceStateFailed):
|
||||
return true, nil, fmt.Errorf("create failed for instance %s with status %s", instanceId, InstanceStateFailed)
|
||||
return true, nil, fmt.Errorf(
|
||||
"create failed for instance %s with status %s",
|
||||
instanceId,
|
||||
InstanceStateFailed,
|
||||
)
|
||||
case strings.ToLower(InstanceStatePending), strings.ToLower(InstanceStateProcessing):
|
||||
tflog.Info(
|
||||
ctx, "request is being handled", map[string]interface{}{
|
||||
|
|
@ -268,7 +281,10 @@ func CreateDatabaseWaitHandler(
|
|||
var oapiErr *oapierror.GenericOpenAPIError
|
||||
ok := errors.As(err, &oapiErr)
|
||||
if !ok {
|
||||
return false, nil, fmt.Errorf("get database - could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||
return false, nil, fmt.Errorf(
|
||||
"get database - could not convert error to oapierror.GenericOpenAPIError: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
if oapiErr.StatusCode != http.StatusNotFound {
|
||||
return false, nil, err
|
||||
|
|
@ -330,7 +346,10 @@ func WaitForUserWaitHandler(
|
|||
var oapiErr *oapierror.GenericOpenAPIError
|
||||
ok := errors.As(err, &oapiErr)
|
||||
if !ok {
|
||||
return false, nil, fmt.Errorf("Wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s", err.Error())
|
||||
return false, nil, fmt.Errorf(
|
||||
"wait (list users) could not convert error to oapierror.GenericOpenAPIError: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
if oapiErr.StatusCode != http.StatusNotFound {
|
||||
return false, nil, err
|
||||
|
|
|
|||
|
|
@ -116,7 +116,6 @@ func (a *apiClientInstanceMocked) GetInstanceRequestExecute(
|
|||
}, nil
|
||||
}
|
||||
func TestCreateInstanceWaitHandler(t *testing.T) {
|
||||
//stateSuccess := utils.Ptr(InstanceStateSuccess)
|
||||
instanceId := utils.Ptr("foo")
|
||||
tests := []struct {
|
||||
desc string
|
||||
|
|
@ -160,7 +159,7 @@ func TestCreateInstanceWaitHandler(t *testing.T) {
|
|||
// Storage: nil,
|
||||
// Version: nil,
|
||||
// },
|
||||
//},
|
||||
// },
|
||||
{
|
||||
desc: "create_failed",
|
||||
instanceId: *instanceId,
|
||||
|
|
|
|||
|
|
@ -46,6 +46,7 @@ var (
|
|||
_ provider.Provider = &Provider{}
|
||||
)
|
||||
|
||||
//nolint:unused // These constants are defined for future use in retry logic for HTTP requests, which is not yet implemented.
|
||||
const (
|
||||
// maxRetries is the maximum number of retries for a failed HTTP request.
|
||||
maxRetries = 3
|
||||
|
|
@ -123,6 +124,7 @@ type providerModel struct {
|
|||
|
||||
// Schema defines the provider-level schema for configuration data.
|
||||
func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) {
|
||||
//nolint:gosec // These are just descriptions, not actual credentials or sensitive information.
|
||||
descriptions := map[string]string{
|
||||
"credentials_path": "Path of JSON from where the credentials are read. Takes precedence over the env var `STACKIT_CREDENTIALS_PATH`. Default value is `~/.stackit/credentials.json`.",
|
||||
"service_account_token": "Token used for authentication. If set, the token flow will be used to authenticate all operations.",
|
||||
|
|
@ -489,7 +491,8 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest,
|
|||
return
|
||||
}
|
||||
|
||||
//roundTripper := core.NewRetryRoundTripper(
|
||||
//nolint:gocritic // maybe later in the code
|
||||
// roundTripper := core.NewRetryRoundTripper(
|
||||
// baseRoundTripper,
|
||||
// maxRetries,
|
||||
// initialDelay,
|
||||
|
|
|
|||
|
|
@ -12,11 +12,12 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/golang-jwt/jwt/v5"
|
||||
test "github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
test "github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing
|
||||
"github.com/jarcoal/httpmock"
|
||||
"github.com/stackitcloud/stackit-sdk-go/core/clients"
|
||||
"github.com/stackitcloud/stackit-sdk-go/core/utils"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/pkg_gen/postgresflexalpha"
|
||||
postgresFlexAlphaFlavor "tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/stackit/internal/services/postgresflexalpha/flavor"
|
||||
|
||||
|
|
@ -40,7 +41,7 @@ import (
|
|||
"tf-provider.git.onstackit.cloud/stackit-dev-tools/terraform-provider-stackitprivatepreview/internal/testutils"
|
||||
|
||||
"github.com/hashicorp/terraform-plugin-testing/config"
|
||||
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
|
||||
"github.com/hashicorp/terraform-plugin-testing/helper/resource" //nolint:staticcheck // used for acceptance testing
|
||||
)
|
||||
|
||||
//go:embed testdata/provider-credentials.tf
|
||||
|
|
@ -67,12 +68,15 @@ func TestMshTest(t *testing.T) {
|
|||
|
||||
testutils.ActivateEnvironmentHttpMocks()
|
||||
|
||||
httpmock.RegisterResponder("POST", `https://service-account.api.stackit.cloud/token`,
|
||||
func(req *http.Request) (*http.Response, error) {
|
||||
token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
|
||||
"foo": "bar",
|
||||
"nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(),
|
||||
})
|
||||
httpmock.RegisterResponder(
|
||||
"POST", `https://service-account.api.stackit.cloud/token`,
|
||||
func(_ *http.Request) (*http.Response, error) {
|
||||
token := jwt.NewWithClaims(
|
||||
jwt.SigningMethodHS256, jwt.MapClaims{
|
||||
"foo": "bar",
|
||||
"nbf": time.Date(2015, 10, 10, 12, 0, 0, 0, time.UTC).Unix(),
|
||||
},
|
||||
)
|
||||
// Sign and get the complete encoded token as a string using the secret
|
||||
tokenString, err := token.SignedString([]byte("mySecret"))
|
||||
if err != nil {
|
||||
|
|
@ -88,10 +92,13 @@ func TestMshTest(t *testing.T) {
|
|||
}
|
||||
|
||||
return httpmock.NewJsonResponse(http.StatusOK, tR)
|
||||
})
|
||||
},
|
||||
)
|
||||
|
||||
httpmock.RegisterResponder("GET", `https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`,
|
||||
func(req *http.Request) (*http.Response, error) {
|
||||
httpmock.RegisterResponder(
|
||||
"GET",
|
||||
`https://postgres-flex-service.api.eu01.stackit.cloud/v3alpha1/projects/xyz-project-id/regions/eu01/flavors?page=1&size=25&sort=id.asc`,
|
||||
func(_ *http.Request) (*http.Response, error) {
|
||||
res := postgresflexalpha.GetFlavorsResponse{
|
||||
Flavors: &[]postgresflexalpha.ListFlavors{
|
||||
{
|
||||
|
|
@ -120,15 +127,17 @@ func TestMshTest(t *testing.T) {
|
|||
},
|
||||
)
|
||||
|
||||
test.Test(t, test.TestCase{
|
||||
IsUnitTest: true,
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []test.TestStep{
|
||||
{
|
||||
ConfigVariables: map[string]config.Variable{
|
||||
"project_id": config.StringVariable("xyz-project-id"),
|
||||
},
|
||||
Config: fmt.Sprintf(`
|
||||
test.Test(
|
||||
t, test.TestCase{
|
||||
IsUnitTest: true,
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []test.TestStep{
|
||||
{
|
||||
ConfigVariables: map[string]config.Variable{
|
||||
"project_id": config.StringVariable("xyz-project-id"),
|
||||
},
|
||||
Config: fmt.Sprintf(
|
||||
`
|
||||
provider "stackitprivatepreview" {
|
||||
default_region = "%[1]s"
|
||||
service_account_key_path = "%[2]s"
|
||||
|
|
@ -144,12 +153,13 @@ func TestMshTest(t *testing.T) {
|
|||
node_type = "Single"
|
||||
storage_class = "premium-perf2-stackit"
|
||||
}`,
|
||||
os.Getenv("TF_ACC_REGION"),
|
||||
os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
|
||||
),
|
||||
os.Getenv("TF_ACC_REGION"),
|
||||
os.Getenv("TF_ACC_SERVICE_ACCOUNT_FILE"),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
|
||||
|
|
@ -171,12 +181,24 @@ func TestUnitProviderHasChildDataSources_Basic(t *testing.T) {
|
|||
sqlserverFlexBetaUser.NewUserDataSource(),
|
||||
sqlserverFlexBetaFlavor.NewFlavorDataSource(),
|
||||
}
|
||||
datasources := stackit.New("testing")().(*stackit.Provider).DataSources(context.Background())
|
||||
provider, ok := stackit.New("testing")().(*stackit.Provider)
|
||||
if !ok {
|
||||
t.Fatal("could not assert provider type")
|
||||
}
|
||||
datasources := provider.DataSources(context.Background())
|
||||
|
||||
if !reflect.DeepEqual(len(expectedDataSources), len(datasources)) {
|
||||
for _, d := range datasources {
|
||||
require.Containsf(t, expectedDataSources, d(), "Data source %+v was not expected", reflect.TypeOf(d()))
|
||||
}
|
||||
expectedMap := map[string]struct{}{}
|
||||
for _, d := range expectedDataSources {
|
||||
expectedMap[reflect.TypeOf(d).String()] = struct{}{}
|
||||
}
|
||||
|
||||
actualMap := map[string]struct{}{}
|
||||
for _, d := range datasources {
|
||||
actualMap[reflect.TypeOf(d()).String()] = struct{}{}
|
||||
}
|
||||
|
||||
if diff := cmp.Diff(expectedMap, actualMap); diff != "" {
|
||||
t.Errorf("DataSources mismatch (-expected +actual):\n%s", diff)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -194,12 +216,24 @@ func TestUnitProviderHasChildResources_Basic(t *testing.T) {
|
|||
sqlserverFlexBetaUser.NewUserResource(),
|
||||
sqlserverflexBetaDatabase.NewDatabaseResource(),
|
||||
}
|
||||
resources := stackit.New("testing")().(*stackit.Provider).Resources(context.Background())
|
||||
provider, ok := stackit.New("testing")().(*stackit.Provider)
|
||||
if !ok {
|
||||
t.Fatal("could not assert provider type")
|
||||
}
|
||||
resources := provider.Resources(context.Background())
|
||||
|
||||
if !reflect.DeepEqual(len(expectedResources), len(resources)) {
|
||||
for _, d := range resources {
|
||||
require.Containsf(t, expectedResources, d(), "Resource %+v was not expected", reflect.TypeOf(d()))
|
||||
}
|
||||
expectedMap := map[string]struct{}{}
|
||||
for _, r := range expectedResources {
|
||||
expectedMap[reflect.TypeOf(r).String()] = struct{}{}
|
||||
}
|
||||
|
||||
actualMap := map[string]struct{}{}
|
||||
for _, r := range resources {
|
||||
actualMap[reflect.TypeOf(r()).String()] = struct{}{}
|
||||
}
|
||||
|
||||
if diff := cmp.Diff(expectedMap, actualMap); diff != "" {
|
||||
t.Errorf("Resources mismatch (-expected +actual):\n%s", diff)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -209,23 +243,25 @@ func TestAccEnvVarServiceAccountPathValid(t *testing.T) {
|
|||
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
||||
t.Skipf(
|
||||
"Acceptance tests skipped unless env '%s' set",
|
||||
resource.EnvTfAcc)
|
||||
resource.EnvTfAcc,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
// t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||
resource.Test(t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
resource.Test(
|
||||
t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
|
||||
|
|
@ -233,17 +269,19 @@ func TestAccEnvVarServiceAccountPathInvalid(t *testing.T) {
|
|||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||
tempHomeFolder := testutils.CreateTemporaryHome(false, t)
|
||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||
resource.Test(t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
ExpectError: regexp.MustCompile(`undefined response type, status code 401`),
|
||||
resource.Test(
|
||||
t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
ExpectError: regexp.MustCompile(`undefined response type, status code 401`),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccCredentialsFileValid(t *testing.T) {
|
||||
|
|
@ -251,16 +289,18 @@ func TestAccCredentialsFileValid(t *testing.T) {
|
|||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||
resource.Test(t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
resource.Test(
|
||||
t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccCredentialsFileInvalid(t *testing.T) {
|
||||
|
|
@ -268,17 +308,19 @@ func TestAccCredentialsFileInvalid(t *testing.T) {
|
|||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||
tempHomeFolder := testutils.CreateTemporaryHome(false, t)
|
||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||
resource.Test(t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`),
|
||||
resource.Test(
|
||||
t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
PreConfig: func() { testutils.SetTemporaryHome(tempHomeFolder) },
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerCredentialConfig,
|
||||
ExpectError: regexp.MustCompile(`Jwt is not in(\r\n|\r|\n)the form of Header.Payload.Signature`),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccProviderConfigureValidValues(t *testing.T) {
|
||||
|
|
@ -287,21 +329,25 @@ func TestAccProviderConfigureValidValues(t *testing.T) {
|
|||
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
||||
t.Skipf(
|
||||
"Acceptance tests skipped unless env '%s' set",
|
||||
resource.EnvTfAcc)
|
||||
resource.EnvTfAcc,
|
||||
)
|
||||
return
|
||||
}
|
||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||
resource.Test(t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{ // valid provider attributes
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerValidAttributes,
|
||||
resource.Test(
|
||||
t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
// valid provider attributes
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerValidAttributes,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
func TestAccProviderConfigureAnInvalidValue(t *testing.T) {
|
||||
|
|
@ -310,21 +356,25 @@ func TestAccProviderConfigureAnInvalidValue(t *testing.T) {
|
|||
if v := os.Getenv(resource.EnvTfAcc); v == "" {
|
||||
t.Skipf(
|
||||
"Acceptance tests skipped unless env '%s' set",
|
||||
resource.EnvTfAcc)
|
||||
resource.EnvTfAcc,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
t.Setenv("STACKIT_CREDENTIALS_PATH", "")
|
||||
tempHomeFolder := testutils.CreateTemporaryHome(true, t)
|
||||
defer testutils.CleanupTemporaryHome(tempHomeFolder, t)
|
||||
resource.Test(t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{ // invalid test attribute should throw an error
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerInvalidAttribute,
|
||||
ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`),
|
||||
resource.Test(
|
||||
t, resource.TestCase{
|
||||
ProtoV6ProviderFactories: testutils.TestAccProtoV6ProviderFactories,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
// invalid test attribute should throw an error
|
||||
ConfigVariables: testConfigProviderCredentials,
|
||||
Config: providerInvalidAttribute,
|
||||
ExpectError: regexp.MustCompile(`An argument named "test" is not expected here\.`),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
//go:build tools
|
||||
|
||||
package tools
|
||||
|
||||
// Format Terraform code for use in documentation.
|
||||
|
|
@ -7,3 +9,11 @@ package tools
|
|||
|
||||
// Generate documentation.
|
||||
//go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-dir .. -provider-name stackitprivatepreview
|
||||
|
||||
import (
|
||||
_ "github.com/golangci/golangci-lint/v2/cmd/golangci-lint"
|
||||
_ "github.com/hashicorp/terraform-plugin-codegen-framework/cmd/tfplugingen-framework"
|
||||
_ "github.com/hashicorp/terraform-plugin-codegen-openapi/cmd/tfplugingen-openapi"
|
||||
_ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs"
|
||||
_ "golang.org/x/tools/cmd/goimports"
|
||||
)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue