Merge remote-tracking branch 'origin/master' into validate-ignore-empty-provider

commit 7e11b97923
@@ -6,7 +6,7 @@ orbs:
 executors:
   go:
     docker:
-      - image: circleci/golang:1.15
+      - image: docker.mirror.hashicorp.services/circleci/golang:1.15
     environment:
       CONSUL_VERSION: 1.7.2
       GOMAXPROCS: 4

@@ -9,12 +9,18 @@ UPGRADE NOTES:

ENHANCEMENTS:

* config: Terraform will now emit a warning if you declare a `backend` block in a non-root module. Terraform has always ignored such declarations, but previously did so silently. This is a warning rather than an error only because it is sometimes convenient to temporarily use a root module as if it were a child module in order to test or debug its behavior separately from its main backend. [GH-26954]
* cli: Improved support for the Windows console UI on Windows 10, including bold colors and underline for HCL diagnostics. [GH-26588]
* cli: The family of error messages with the summary "Invalid for_each argument" will now include some additional context about which external values contributed to the result. [GH-26747]
* cli: Typing an invalid top-level command, like `terraform destory` instead of `destroy`, will now print out a specific error message about the command being invalid, rather than just printing out the usual help directory. [GH-26967]
* cli: Plugin crashes will now be reported with more detail, pointing out the plugin name and the method call along with the stack trace. [GH-26694]
* provisioner/remote-exec: Can now run in a mode that expects the remote system to be running Windows and executing commands using the Windows command interpreter, rather than a Unix-style shell. Specify the `target_platform` as `"windows"` in the `connection` block. [GH-26865]

BUG FIXES:

* cli: Exit with an error if unable to gather input from the UI. For example, this may happen when running in a non-interactive environment but without `-input=false`. Previously Terraform would interpret these errors as empty strings, which could be confusing. [GH-26509]
* cli: TF_LOG levels other than `trace` will now work correctly. [GH-26632]
* cli: Core and provider logs can now be enabled separately for debugging, using `TF_LOG_CORE` and `TF_LOG_PROVIDER`. [GH-26685]

## Previous Releases

@@ -7,7 +7,7 @@
 # the officially-released binary from releases.hashicorp.com and are
 # built by the (closed-source) official release process.

-FROM golang:alpine
+FROM docker.mirror.hashicorp.services/golang:alpine
 LABEL maintainer="HashiCorp Terraform Team <terraform@hashicorp.com>"

 RUN apk add --no-cache git bash openssh

Makefile (6 lines changed)
@@ -6,12 +6,6 @@ VERSION?="0.3.44"
 # "make protobuf".
 generate:
 	go generate ./...
-	# go fmt doesn't support -mod=vendor but it still wants to populate the
-	# module cache with everything in go.mod even though formatting requires
-	# no dependencies, and so we're disabling modules mode for this right
-	# now until the "go fmt" behavior is rationalized to either support the
-	# -mod= argument or _not_ try to install things.
-	GO111MODULE=off go fmt command/internal_plugin_list.go > /dev/null

 # We separate the protobuf generation because most development tasks on
 # Terraform do not involve changing protobuf files and protoc is not a

@@ -82,6 +82,7 @@ func parseModuleInstancePrefix(traversal hcl.Traversal) (ModuleInstance, hcl.Tra
 	var mi ModuleInstance
 	var diags tfdiags.Diagnostics

+LOOP:
 	for len(remain) > 0 {
 		var next string
 		switch tt := remain[0].(type) {

@@ -96,7 +97,7 @@ func parseModuleInstancePrefix(traversal hcl.Traversal) (ModuleInstance, hcl.Tra
 				Detail:  "Module address prefix must be followed by dot and then a name.",
 				Subject: remain[0].SourceRange().Ptr(),
 			})
-			break
+			break LOOP
 		}

 		if next != "module" {

@@ -129,7 +130,7 @@ func parseModuleInstancePrefix(traversal hcl.Traversal) (ModuleInstance, hcl.Tra
 				Detail:  "Prefix \"module.\" must be followed by a module name.",
 				Subject: remain[0].SourceRange().Ptr(),
 			})
-			break
+			break LOOP
 		}
 		remain = remain[1:]
 		step := ModuleInstanceStep{

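The `break` → `break LOOP` change above matters because, in Go, a bare `break` inside a `switch` only terminates the switch, not the enclosing `for` loop. A minimal standalone sketch (not part of the diff) illustrating the difference:

```go
package main

import "fmt"

func main() {
	tokens := []string{"module", "foo", "!", "module", "bar"}

LOOP:
	for _, tok := range tokens {
		switch tok {
		case "!":
			// A bare "break" here would only exit the switch, and the loop
			// would continue with the remaining tokens; the label exits the
			// whole loop, which is what the parser change above relies on.
			break LOOP
		default:
			fmt.Println("token:", tok)
		}
	}
}
```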
@@ -354,9 +354,3 @@ This means that Terraform did not detect any differences between your
 configuration and real physical resources that exist. As a result, no
 actions need to be performed.
 `
-
-const planRefreshing = `
-[reset][bold]Refreshing Terraform state in-memory prior to plan...[reset]
-The refreshed state will be used to calculate this plan, but will not be
-persisted to local or remote state storage.
-`

@ -241,6 +241,56 @@ Changes to Outputs:
|
|||
}
|
||||
}
|
||||
|
||||
// Module outputs should not cause the plan to be rendered
|
||||
func TestLocal_planModuleOutputsChanged(t *testing.T) {
|
||||
b, cleanup := TestLocal(t)
|
||||
defer cleanup()
|
||||
testStateFile(t, b.StatePath, states.BuildState(func(ss *states.SyncState) {
|
||||
ss.SetOutputValue(addrs.AbsOutputValue{
|
||||
Module: addrs.RootModuleInstance.Child("mod", addrs.NoKey),
|
||||
OutputValue: addrs.OutputValue{Name: "changed"},
|
||||
}, cty.StringVal("before"), false)
|
||||
}))
|
||||
b.CLI = cli.NewMockUi()
|
||||
outDir := testTempDir(t)
|
||||
defer os.RemoveAll(outDir)
|
||||
planPath := filepath.Join(outDir, "plan.tfplan")
|
||||
op, configCleanup := testOperationPlan(t, "./testdata/plan-module-outputs-changed")
|
||||
defer configCleanup()
|
||||
op.PlanRefresh = true
|
||||
op.PlanOutPath = planPath
|
||||
cfg := cty.ObjectVal(map[string]cty.Value{
|
||||
"path": cty.StringVal(b.StatePath),
|
||||
})
|
||||
cfgRaw, err := plans.NewDynamicValue(cfg, cfg.Type())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
op.PlanOutBackend = &plans.Backend{
|
||||
Type: "local",
|
||||
Config: cfgRaw,
|
||||
}
|
||||
run, err := b.Operation(context.Background(), op)
|
||||
if err != nil {
|
||||
t.Fatalf("bad: %s", err)
|
||||
}
|
||||
<-run.Done()
|
||||
if run.Result != backend.OperationSuccess {
|
||||
t.Fatalf("plan operation failed")
|
||||
}
|
||||
if !run.PlanEmpty {
|
||||
t.Fatal("plan should be empty")
|
||||
}
|
||||
|
||||
expectedOutput := strings.TrimSpace(`
|
||||
No changes. Infrastructure is up-to-date.
|
||||
`)
|
||||
output := b.CLI.(*cli.MockUi).OutputWriter.String()
|
||||
if !strings.Contains(output, expectedOutput) {
|
||||
t.Fatalf("Unexpected output:\n%s\n\nwant output containing:\n%s", output, expectedOutput)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLocal_planTainted(t *testing.T) {
|
||||
b, cleanup := TestLocal(t)
|
||||
defer cleanup()
|
||||
|
@ -640,7 +690,7 @@ Plan: 0 to add, 0 to change, 1 to destroy.`
|
|||
}
|
||||
|
||||
func getAddrs(resources []*plans.ResourceInstanceChangeSrc) []string {
|
||||
addrs := make([]string, len(resources), len(resources))
|
||||
addrs := make([]string, len(resources))
|
||||
for i, r := range resources {
|
||||
addrs[i] = r.Addr.String()
|
||||
}
|
||||
|
|
|
@ -10,6 +10,8 @@ import (
|
|||
"github.com/hashicorp/terraform/plans"
|
||||
"github.com/hashicorp/terraform/states"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
|
||||
legacy "github.com/hashicorp/terraform/internal/legacy/terraform"
|
||||
)
|
||||
|
||||
func TestCountHook_impl(t *testing.T) {
|
||||
|
@ -19,8 +21,8 @@ func TestCountHook_impl(t *testing.T) {
|
|||
func TestCountHookPostDiff_DestroyDeposed(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"lorem": &terraform.InstanceDiff{DestroyDeposed: true},
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"lorem": &legacy.InstanceDiff{DestroyDeposed: true},
|
||||
}
|
||||
|
||||
for k := range resources {
|
||||
|
@ -47,11 +49,11 @@ func TestCountHookPostDiff_DestroyDeposed(t *testing.T) {
|
|||
func TestCountHookPostDiff_DestroyOnly(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"foo": &terraform.InstanceDiff{Destroy: true},
|
||||
"bar": &terraform.InstanceDiff{Destroy: true},
|
||||
"lorem": &terraform.InstanceDiff{Destroy: true},
|
||||
"ipsum": &terraform.InstanceDiff{Destroy: true},
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"foo": &legacy.InstanceDiff{Destroy: true},
|
||||
"bar": &legacy.InstanceDiff{Destroy: true},
|
||||
"lorem": &legacy.InstanceDiff{Destroy: true},
|
||||
"ipsum": &legacy.InstanceDiff{Destroy: true},
|
||||
}
|
||||
|
||||
for k := range resources {
|
||||
|
@ -78,20 +80,20 @@ func TestCountHookPostDiff_DestroyOnly(t *testing.T) {
|
|||
func TestCountHookPostDiff_AddOnly(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"foo": &terraform.InstanceDiff{
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{RequiresNew: true},
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"foo": &legacy.InstanceDiff{
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{RequiresNew: true},
|
||||
},
|
||||
},
|
||||
"bar": &terraform.InstanceDiff{
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{RequiresNew: true},
|
||||
"bar": &legacy.InstanceDiff{
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{RequiresNew: true},
|
||||
},
|
||||
},
|
||||
"lorem": &terraform.InstanceDiff{
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{RequiresNew: true},
|
||||
"lorem": &legacy.InstanceDiff{
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{RequiresNew: true},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -120,23 +122,23 @@ func TestCountHookPostDiff_AddOnly(t *testing.T) {
|
|||
func TestCountHookPostDiff_ChangeOnly(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"foo": &terraform.InstanceDiff{
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"foo": &legacy.InstanceDiff{
|
||||
Destroy: false,
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{},
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{},
|
||||
},
|
||||
},
|
||||
"bar": &terraform.InstanceDiff{
|
||||
"bar": &legacy.InstanceDiff{
|
||||
Destroy: false,
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{},
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{},
|
||||
},
|
||||
},
|
||||
"lorem": &terraform.InstanceDiff{
|
||||
"lorem": &legacy.InstanceDiff{
|
||||
Destroy: false,
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{},
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -197,11 +199,11 @@ func TestCountHookPostDiff_Mixed(t *testing.T) {
|
|||
func TestCountHookPostDiff_NoChange(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"foo": &terraform.InstanceDiff{},
|
||||
"bar": &terraform.InstanceDiff{},
|
||||
"lorem": &terraform.InstanceDiff{},
|
||||
"ipsum": &terraform.InstanceDiff{},
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"foo": &legacy.InstanceDiff{},
|
||||
"bar": &legacy.InstanceDiff{},
|
||||
"lorem": &legacy.InstanceDiff{},
|
||||
"ipsum": &legacy.InstanceDiff{},
|
||||
}
|
||||
|
||||
for k := range resources {
|
||||
|
@ -261,23 +263,23 @@ func TestCountHookPostDiff_DataSource(t *testing.T) {
|
|||
func TestCountHookApply_ChangeOnly(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"foo": &terraform.InstanceDiff{
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"foo": &legacy.InstanceDiff{
|
||||
Destroy: false,
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{},
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{},
|
||||
},
|
||||
},
|
||||
"bar": &terraform.InstanceDiff{
|
||||
"bar": &legacy.InstanceDiff{
|
||||
Destroy: false,
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{},
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{},
|
||||
},
|
||||
},
|
||||
"lorem": &terraform.InstanceDiff{
|
||||
"lorem": &legacy.InstanceDiff{
|
||||
Destroy: false,
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"foo": &terraform.ResourceAttrDiff{},
|
||||
Attributes: map[string]*legacy.ResourceAttrDiff{
|
||||
"foo": &legacy.ResourceAttrDiff{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -306,11 +308,11 @@ func TestCountHookApply_ChangeOnly(t *testing.T) {
|
|||
func TestCountHookApply_DestroyOnly(t *testing.T) {
|
||||
h := new(CountHook)
|
||||
|
||||
resources := map[string]*terraform.InstanceDiff{
|
||||
"foo": &terraform.InstanceDiff{Destroy: true},
|
||||
"bar": &terraform.InstanceDiff{Destroy: true},
|
||||
"lorem": &terraform.InstanceDiff{Destroy: true},
|
||||
"ipsum": &terraform.InstanceDiff{Destroy: true},
|
||||
resources := map[string]*legacy.InstanceDiff{
|
||||
"foo": &legacy.InstanceDiff{Destroy: true},
|
||||
"bar": &legacy.InstanceDiff{Destroy: true},
|
||||
"lorem": &legacy.InstanceDiff{Destroy: true},
|
||||
"ipsum": &legacy.InstanceDiff{Destroy: true},
|
||||
}
|
||||
|
||||
for k := range resources {
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
module "mod" {
|
||||
source = "./mod"
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
output "changed" {
|
||||
value = "after"
|
||||
}
|
|
@ -5,7 +5,7 @@ import (
|
|||
|
||||
cleanhttp "github.com/hashicorp/go-cleanhttp"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/states/remote"
|
||||
"github.com/hashicorp/terraform/states/statemgr"
|
||||
artifactory "github.com/lusis/go-artifactory/src/artifactory.v401"
|
||||
|
|
|
@ -5,7 +5,7 @@ import (
|
|||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
)
|
||||
|
||||
// New creates a new backend for Azure remote state.
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
|
||||
consulapi "github.com/hashicorp/consul/api"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
)
|
||||
|
||||
// New creates a new backend for Consul remote state.
|
||||
|
|
|
@ -9,7 +9,7 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common"
|
||||
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/profile"
|
||||
tag "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/tag/v20180813"
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
|
||||
etcdapi "github.com/coreos/etcd/client"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/states/remote"
|
||||
"github.com/hashicorp/terraform/states/statemgr"
|
||||
)
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
etcdv3 "github.com/coreos/etcd/clientv3"
|
||||
"github.com/coreos/etcd/pkg/transport"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
|
@ -11,8 +11,8 @@ import (
|
|||
|
||||
"cloud.google.com/go/storage"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/httpclient"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"golang.org/x/oauth2"
|
||||
"golang.org/x/oauth2/jwt"
|
||||
"google.golang.org/api/option"
|
||||
|
|
|
@ -11,7 +11,7 @@ import (
|
|||
"github.com/hashicorp/go-cleanhttp"
|
||||
"github.com/hashicorp/go-retryablehttp"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/states/remote"
|
||||
"github.com/hashicorp/terraform/states/statemgr"
|
||||
)
|
||||
|
|
|
@ -9,7 +9,7 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
statespkg "github.com/hashicorp/terraform/states"
|
||||
"github.com/hashicorp/terraform/states/remote"
|
||||
"github.com/hashicorp/terraform/states/statemgr"
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
"os"
|
||||
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/version"
|
||||
"github.com/mitchellh/cli"
|
||||
"github.com/mitchellh/go-homedir"
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -11,7 +11,7 @@ import (
|
|||
"github.com/hashicorp/errwrap"
|
||||
"github.com/hashicorp/go-multierror"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
triton "github.com/joyent/triton-go"
|
||||
"github.com/joyent/triton-go/authentication"
|
||||
"github.com/joyent/triton-go/storage"
|
||||
|
|
|
@ -25,7 +25,7 @@ import (
|
|||
"github.com/aliyun/aliyun-tablestore-go-sdk/tablestore"
|
||||
"github.com/hashicorp/go-cleanhttp"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/version"
|
||||
"github.com/jmespath/go-jmespath"
|
||||
"github.com/mitchellh/go-homedir"
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/lib/pq"
|
||||
)
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ import (
|
|||
"github.com/aws/aws-sdk-go/service/s3"
|
||||
awsbase "github.com/hashicorp/aws-sdk-go-base"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/logging"
|
||||
"github.com/hashicorp/terraform/version"
|
||||
)
|
||||
|
|
|
@ -12,7 +12,7 @@ import (
|
|||
"github.com/gophercloud/utils/terraform/auth"
|
||||
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/internal/legacy/helper/schema"
|
||||
"github.com/hashicorp/terraform/version"
|
||||
)
|
||||
|
||||
|
|
|
@@ -85,6 +85,12 @@ type Remote struct {

 	// opLock locks operations
 	opLock sync.Mutex
+
+	// ignoreVersionConflict, if true, will disable the requirement that the
+	// local Terraform version matches the remote workspace's configured
+	// version. This will also cause VerifyWorkspaceTerraformVersion to return
+	// a warning diagnostic instead of an error.
+	ignoreVersionConflict bool
 }

 var _ backend.Backend = (*Remote)(nil)

@@ -629,6 +635,17 @@ func (b *Remote) StateMgr(name string) (statemgr.Full, error) {
 		}
 	}

+	// This is a fallback error check. Most code paths should use other
+	// mechanisms to check the version, then set the ignoreVersionConflict
+	// field to true. This check is only in place to ensure that we don't
+	// accidentally upgrade state with a new code path, and the version check
+	// logic is coarser and simpler.
+	if !b.ignoreVersionConflict {
+		if workspace.TerraformVersion != tfversion.String() {
+			return nil, fmt.Errorf("Remote workspace Terraform version %q does not match local Terraform version %q", workspace.TerraformVersion, tfversion.String())
+		}
+	}
+
 	client := &remoteClient{
 		client:       b.client,
 		organization: b.organization,

@@ -676,9 +693,17 @@ func (b *Remote) Operation(ctx context.Context, op *backend.Operation) (*backend

 	// Check if we need to use the local backend to run the operation.
 	if b.forceLocal || !w.Operations {
+		if !w.Operations {
+			// Workspace is explicitly configured for local operations, so its
+			// configured Terraform version is meaningless
+			b.IgnoreVersionConflict()
+		}
 		return b.local.Operation(ctx, op)
 	}

+	// Running remotely so we don't care about version conflicts
+	b.IgnoreVersionConflict()
+
 	// Set the remote workspace name.
 	op.Workspace = w.Name

@@ -837,6 +862,101 @@ func (b *Remote) ReportResult(op *backend.RunningOperation, err error) {
 	}
 }

+// IgnoreVersionConflict allows commands to disable the fall-back check that
+// the local Terraform version matches the remote workspace's configured
+// Terraform version. This should be called by commands where this check is
+// unnecessary, such as those performing remote operations, or read-only
+// operations. It will also be called if the user uses a command-line flag to
+// override this check.
+func (b *Remote) IgnoreVersionConflict() {
+	b.ignoreVersionConflict = true
+}
+
+// VerifyWorkspaceTerraformVersion compares the local Terraform version against
+// the workspace's configured Terraform version. If they are equal, this means
+// that there are no compatibility concerns, so it returns no diagnostics.
+//
+// If the versions differ, it returns an error, or a warning if IgnoreVersionConflict has been called.
+func (b *Remote) VerifyWorkspaceTerraformVersion(workspaceName string) tfdiags.Diagnostics {
+	var diags tfdiags.Diagnostics
+
+	workspace, err := b.getRemoteWorkspace(context.Background(), workspaceName)
+	if err != nil {
+		diags = diags.Append(tfdiags.Sourceless(
+			tfdiags.Error,
+			"Error looking up workspace",
+			fmt.Sprintf("Workspace read failed: %s", err),
+		))
+		return diags
+	}
+
+	remoteVersion, err := version.NewSemver(workspace.TerraformVersion)
+	if err != nil {
+		diags = diags.Append(tfdiags.Sourceless(
+			tfdiags.Error,
+			"Error looking up workspace",
+			fmt.Sprintf("Invalid Terraform version: %s", err),
+		))
+		return diags
+	}
+
+	v014 := version.Must(version.NewSemver("0.14.0"))
+	if tfversion.SemVer.LessThan(v014) || remoteVersion.LessThan(v014) {
+		// Versions of Terraform prior to 0.14.0 will refuse to load state files
+		// written by a newer version of Terraform, even if it is only a patch
+		// level difference. As a result we require an exact match.
+		if tfversion.SemVer.Equal(remoteVersion) {
+			return diags
+		}
+	}
+	if tfversion.SemVer.GreaterThanOrEqual(v014) && remoteVersion.GreaterThanOrEqual(v014) {
+		// Versions of Terraform after 0.14.0 should be compatible with each
+		// other. At the time this code was written, the only constraints we
+		// are aware of are:
+		//
+		// - 0.14.0 is guaranteed to be compatible with versions up to but not
+		//   including 1.1.0
+		v110 := version.Must(version.NewSemver("1.1.0"))
+		if tfversion.SemVer.LessThan(v110) && remoteVersion.LessThan(v110) {
+			return diags
+		}
+		// - Any new Terraform state version will require at least minor patch
+		//   increment, so x.y.* will always be compatible with each other
+		tfvs := tfversion.SemVer.Segments64()
+		rwvs := remoteVersion.Segments64()
+		if len(tfvs) == 3 && len(rwvs) == 3 && tfvs[0] == rwvs[0] && tfvs[1] == rwvs[1] {
+			return diags
+		}
+	}
+
+	// Even if ignoring version conflicts, it may still be useful to call this
+	// method and warn the user about a mismatch between the local and remote
+	// Terraform versions.
+	severity := tfdiags.Error
+	if b.ignoreVersionConflict {
+		severity = tfdiags.Warning
+	}
+
+	suggestion := " If you're sure you want to upgrade the state, you can force Terraform to continue using the -ignore-remote-version flag. This may result in an unusable workspace."
+	if b.ignoreVersionConflict {
+		suggestion = ""
+	}
+	diags = diags.Append(tfdiags.Sourceless(
+		severity,
+		"Terraform version mismatch",
+		fmt.Sprintf(
+			"The local Terraform version (%s) does not match the configured version for remote workspace %s/%s (%s).%s",
+			tfversion.String(),
+			b.organization,
+			workspace.Name,
+			workspace.TerraformVersion,
+			suggestion,
+		),
+	))
+
+	return diags
+}
+
 // Colorize returns the Colorize structure that can be used for colorizing
 // output. This is guaranteed to always return a non-nil value and so useful
 // as a helper to wrap any potentially colored strings.

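A rough sketch of how a caller inside the same package might wire these two methods together (hypothetical command-side code, not part of this diff; the flag name comes from the suggestion text above):

```go
// Hypothetical caller-side wiring, for illustration only. `b` is the *Remote
// backend from this file, and ignoreRemoteVersion would be the value of a
// -ignore-remote-version command-line flag parsed elsewhere.
func checkRemoteVersion(b *Remote, workspaceName string, ignoreRemoteVersion bool) tfdiags.Diagnostics {
	if ignoreRemoteVersion {
		// Downgrades a mismatch from an error to a warning and disables
		// the fallback check in StateMgr.
		b.IgnoreVersionConflict()
	}
	return b.VerifyWorkspaceTerraformVersion(workspaceName)
}
```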
@ -11,12 +11,14 @@ import (
|
|||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
tfe "github.com/hashicorp/go-tfe"
|
||||
version "github.com/hashicorp/go-version"
|
||||
"github.com/hashicorp/terraform/addrs"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/internal/initwd"
|
||||
"github.com/hashicorp/terraform/plans/planfile"
|
||||
"github.com/hashicorp/terraform/states/statemgr"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
tfversion "github.com/hashicorp/terraform/version"
|
||||
"github.com/mitchellh/cli"
|
||||
)
|
||||
|
||||
|
@ -542,8 +544,8 @@ func TestRemote_applyApprovedExternally(t *testing.T) {
|
|||
t.Fatalf("error starting operation: %v", err)
|
||||
}
|
||||
|
||||
// Wait 2 seconds to make sure the run started.
|
||||
time.Sleep(2 * time.Second)
|
||||
// Wait 50 milliseconds to make sure the run started.
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
|
||||
wl, err := b.client.Workspaces.List(
|
||||
ctx,
|
||||
|
@ -617,8 +619,8 @@ func TestRemote_applyDiscardedExternally(t *testing.T) {
|
|||
t.Fatalf("error starting operation: %v", err)
|
||||
}
|
||||
|
||||
// Wait 2 seconds to make sure the run started.
|
||||
time.Sleep(2 * time.Second)
|
||||
// Wait 50 milliseconds to make sure the run started.
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
|
||||
wl, err := b.client.Workspaces.List(
|
||||
ctx,
|
||||
|
@ -871,7 +873,7 @@ func TestRemote_applyLockTimeout(t *testing.T) {
|
|||
"approve": "yes",
|
||||
})
|
||||
|
||||
op.StateLockTimeout = 5 * time.Second
|
||||
op.StateLockTimeout = 50 * time.Millisecond
|
||||
op.UIIn = input
|
||||
op.UIOut = b.CLI
|
||||
op.Workspace = backend.DefaultStateName
|
||||
|
@ -887,8 +889,8 @@ func TestRemote_applyLockTimeout(t *testing.T) {
|
|||
case <-sigint:
|
||||
// Stop redirecting SIGINT signals.
|
||||
signal.Stop(sigint)
|
||||
case <-time.After(10 * time.Second):
|
||||
t.Fatalf("expected lock timeout after 5 seconds, waited 10 seconds")
|
||||
case <-time.After(200 * time.Millisecond):
|
||||
t.Fatalf("expected lock timeout after 50 milliseconds, waited 200 milliseconds")
|
||||
}
|
||||
|
||||
if len(input.answers) != 2 {
|
||||
|
@ -1277,3 +1279,133 @@ func TestRemote_applyWithRemoteError(t *testing.T) {
|
|||
t.Fatalf("expected apply error in output: %s", output)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemote_applyVersionCheck(t *testing.T) {
|
||||
testCases := map[string]struct {
|
||||
localVersion string
|
||||
remoteVersion string
|
||||
forceLocal bool
|
||||
hasOperations bool
|
||||
wantErr string
|
||||
}{
|
||||
"versions can be different for remote apply": {
|
||||
localVersion: "0.14.0",
|
||||
remoteVersion: "0.13.5",
|
||||
hasOperations: true,
|
||||
},
|
||||
"versions can be different for local apply": {
|
||||
localVersion: "0.14.0",
|
||||
remoteVersion: "0.13.5",
|
||||
hasOperations: false,
|
||||
},
|
||||
"error if force local, has remote operations, different versions": {
|
||||
localVersion: "0.14.0",
|
||||
remoteVersion: "0.13.5",
|
||||
forceLocal: true,
|
||||
hasOperations: true,
|
||||
wantErr: `Remote workspace Terraform version "0.13.5" does not match local Terraform version "0.14.0"`,
|
||||
},
|
||||
"no error if versions are identical": {
|
||||
localVersion: "0.14.0",
|
||||
remoteVersion: "0.14.0",
|
||||
forceLocal: true,
|
||||
hasOperations: true,
|
||||
},
|
||||
"no error if force local but workspace has remote operations disabled": {
|
||||
localVersion: "0.14.0",
|
||||
remoteVersion: "0.13.5",
|
||||
forceLocal: true,
|
||||
hasOperations: false,
|
||||
},
|
||||
}
|
||||
|
||||
for name, tc := range testCases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
b, bCleanup := testBackendDefault(t)
|
||||
defer bCleanup()
|
||||
|
||||
// SETUP: Save original local version state and restore afterwards
|
||||
p := tfversion.Prerelease
|
||||
v := tfversion.Version
|
||||
s := tfversion.SemVer
|
||||
defer func() {
|
||||
tfversion.Prerelease = p
|
||||
tfversion.Version = v
|
||||
tfversion.SemVer = s
|
||||
}()
|
||||
|
||||
// SETUP: Set local version for the test case
|
||||
tfversion.Prerelease = ""
|
||||
tfversion.Version = tc.localVersion
|
||||
tfversion.SemVer = version.Must(version.NewSemver(tc.localVersion))
|
||||
|
||||
// SETUP: Set force local for the test case
|
||||
b.forceLocal = tc.forceLocal
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// SETUP: set the operations and Terraform Version fields on the
|
||||
// remote workspace
|
||||
_, err := b.client.Workspaces.Update(
|
||||
ctx,
|
||||
b.organization,
|
||||
b.workspace,
|
||||
tfe.WorkspaceUpdateOptions{
|
||||
Operations: tfe.Bool(tc.hasOperations),
|
||||
TerraformVersion: tfe.String(tc.remoteVersion),
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("error creating named workspace: %v", err)
|
||||
}
|
||||
|
||||
// RUN: prepare the apply operation and run it
|
||||
op, configCleanup := testOperationApply(t, "./testdata/apply")
|
||||
defer configCleanup()
|
||||
|
||||
input := testInput(t, map[string]string{
|
||||
"approve": "yes",
|
||||
})
|
||||
|
||||
op.UIIn = input
|
||||
op.UIOut = b.CLI
|
||||
op.Workspace = backend.DefaultStateName
|
||||
|
||||
run, err := b.Operation(ctx, op)
|
||||
if err != nil {
|
||||
t.Fatalf("error starting operation: %v", err)
|
||||
}
|
||||
|
||||
// RUN: wait for completion
|
||||
<-run.Done()
|
||||
|
||||
if tc.wantErr != "" {
|
||||
// ASSERT: if the test case wants an error, check for failure
|
||||
// and the error message
|
||||
if run.Result != backend.OperationFailure {
|
||||
t.Fatalf("expected run to fail, but result was %#v", run.Result)
|
||||
}
|
||||
errOutput := b.CLI.(*cli.MockUi).ErrorWriter.String()
|
||||
if !strings.Contains(errOutput, tc.wantErr) {
|
||||
t.Fatalf("missing error %q\noutput: %s", tc.wantErr, errOutput)
|
||||
}
|
||||
} else {
|
||||
// ASSERT: otherwise, check for success and appropriate output
|
||||
// based on whether the run should be local or remote
|
||||
if run.Result != backend.OperationSuccess {
|
||||
t.Fatalf("operation failed: %s", b.CLI.(*cli.MockUi).ErrorWriter.String())
|
||||
}
|
||||
output := b.CLI.(*cli.MockUi).OutputWriter.String()
|
||||
hasRemote := strings.Contains(output, "Running apply in the remote backend")
|
||||
if !tc.forceLocal && tc.hasOperations && !hasRemote {
|
||||
t.Fatalf("missing remote backend header in output: %s", output)
|
||||
} else if (tc.forceLocal || !tc.hasOperations) && hasRemote {
|
||||
t.Fatalf("unexpected remote backend header in output: %s", output)
|
||||
}
|
||||
if !strings.Contains(output, "1 added, 0 changed, 0 destroyed") {
|
||||
t.Fatalf("expected apply summary in output: %s", output)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -24,6 +24,13 @@ var (
 	errRunOverridden = errors.New("overridden using the UI or API")
 )

+var (
+	backoffMin = 1000.0
+	backoffMax = 3000.0
+
+	runPollInterval = 3 * time.Second
+)
+
 // backoff will perform exponential backoff based on the iteration and
 // limited by the provided min and max (in milliseconds) durations.
 func backoff(min, max float64, iter int) time.Duration {

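The body of `backoff` falls outside this hunk. As a rough sketch, a helper with the documented behaviour (delay growing exponentially with the iteration count, clamped to the min/max milliseconds) could look like this; illustrative only, not necessarily the actual implementation:

```go
package remote

import (
	"math"
	"time"
)

// Sketch only: grow the delay exponentially with the iteration count and
// clamp it between min and max, both given in milliseconds.
func backoff(min, max float64, iter int) time.Duration {
	delay := math.Pow(2, float64(iter)/5) * min
	if delay > max {
		delay = max
	}
	return time.Duration(delay) * time.Millisecond
}
```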
@@ -43,7 +50,7 @@ func (b *Remote) waitForRun(stopCtx, cancelCtx context.Context, op *backend.Oper
 			return r, stopCtx.Err()
 		case <-cancelCtx.Done():
 			return r, cancelCtx.Err()
-		case <-time.After(backoff(1000, 3000, i)):
+		case <-time.After(backoff(backoffMin, backoffMax, i)):
 			// Timer up, show status
 		}

@@ -260,7 +267,7 @@ func (b *Remote) costEstimate(stopCtx, cancelCtx context.Context, op *backend.Op
 			return stopCtx.Err()
 		case <-cancelCtx.Done():
 			return cancelCtx.Err()
-		case <-time.After(1 * time.Second):
+		case <-time.After(backoff(backoffMin, backoffMax, i)):
 		}

 		// Retrieve the cost estimate to get its current status.

@@ -454,7 +461,7 @@ func (b *Remote) confirm(stopCtx context.Context, op *backend.Operation, opts *t
 			return
 		case <-stopCtx.Done():
 			return
-		case <-time.After(3 * time.Second):
+		case <-time.After(runPollInterval):
 			// Retrieve the run again to get its current status.
 			r, err := b.client.Runs.Read(stopCtx, r.ID)
 			if err != nil {

@@ -488,10 +495,10 @@ func (b *Remote) confirm(stopCtx context.Context, op *backend.Operation, opts *t
 		}

 		if err == errRunDiscarded {
+			err = errApplyDiscarded
 			if op.Destroy {
 				err = errDestroyDiscarded
 			}
-			err = errApplyDiscarded
 		}

 		result <- err

@ -156,11 +156,20 @@ func (b *Remote) getRemoteWorkspaceName(localWorkspaceName string) string {
|
|||
}
|
||||
}
|
||||
|
||||
func (b *Remote) getRemoteWorkspaceID(ctx context.Context, localWorkspaceName string) (string, error) {
|
||||
func (b *Remote) getRemoteWorkspace(ctx context.Context, localWorkspaceName string) (*tfe.Workspace, error) {
|
||||
remoteWorkspaceName := b.getRemoteWorkspaceName(localWorkspaceName)
|
||||
|
||||
log.Printf("[TRACE] backend/remote: looking up workspace id for %s/%s", b.organization, remoteWorkspaceName)
|
||||
log.Printf("[TRACE] backend/remote: looking up workspace for %s/%s", b.organization, remoteWorkspaceName)
|
||||
remoteWorkspace, err := b.client.Workspaces.Read(ctx, b.organization, remoteWorkspaceName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return remoteWorkspace, nil
|
||||
}
|
||||
|
||||
func (b *Remote) getRemoteWorkspaceID(ctx context.Context, localWorkspaceName string) (string, error) {
|
||||
remoteWorkspace, err := b.getRemoteWorkspace(ctx, localWorkspaceName)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
|
|
@ -195,7 +195,7 @@ func TestRemoteContextWithVars(t *testing.T) {
|
|||
key := "key"
|
||||
v.Key = &key
|
||||
}
|
||||
b.client.Variables.Create(nil, workspaceID, *v)
|
||||
b.client.Variables.Create(context.TODO(), workspaceID, *v)
|
||||
|
||||
_, _, diags := b.Context(op)
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@ import (
|
|||
|
||||
tfe "github.com/hashicorp/go-tfe"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
tfversion "github.com/hashicorp/terraform/version"
|
||||
"github.com/mitchellh/copystructure"
|
||||
)
|
||||
|
||||
|
@ -360,7 +361,7 @@ func (m *mockLogReader) Read(l []byte) (int, error) {
|
|||
if written, err := m.read(l); err != io.ErrNoProgress {
|
||||
return written, err
|
||||
}
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
time.Sleep(1 * time.Millisecond)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1124,10 +1125,15 @@ func (m *mockWorkspaces) List(ctx context.Context, organization string, options
|
|||
}
|
||||
|
||||
func (m *mockWorkspaces) Create(ctx context.Context, organization string, options tfe.WorkspaceCreateOptions) (*tfe.Workspace, error) {
|
||||
if strings.HasSuffix(*options.Name, "no-operations") {
|
||||
options.Operations = tfe.Bool(false)
|
||||
} else if options.Operations == nil {
|
||||
options.Operations = tfe.Bool(true)
|
||||
}
|
||||
w := &tfe.Workspace{
|
||||
ID: generateID("ws-"),
|
||||
Name: *options.Name,
|
||||
Operations: !strings.HasSuffix(*options.Name, "no-operations"),
|
||||
Operations: *options.Operations,
|
||||
Permissions: &tfe.WorkspacePermissions{
|
||||
CanQueueApply: true,
|
||||
CanQueueRun: true,
|
||||
|
@ -1139,6 +1145,11 @@ func (m *mockWorkspaces) Create(ctx context.Context, organization string, option
|
|||
if options.VCSRepo != nil {
|
||||
w.VCSRepo = &tfe.VCSRepo{}
|
||||
}
|
||||
if options.TerraformVersion != nil {
|
||||
w.TerraformVersion = *options.TerraformVersion
|
||||
} else {
|
||||
w.TerraformVersion = tfversion.String()
|
||||
}
|
||||
m.workspaceIDs[w.ID] = w
|
||||
m.workspaceNames[w.Name] = w
|
||||
return w, nil
|
||||
|
@ -1171,6 +1182,9 @@ func (m *mockWorkspaces) Update(ctx context.Context, organization, workspace str
|
|||
return nil, tfe.ErrResourceNotFound
|
||||
}
|
||||
|
||||
if options.Operations != nil {
|
||||
w.Operations = *options.Operations
|
||||
}
|
||||
if options.Name != nil {
|
||||
w.Name = *options.Name
|
||||
}
|
||||
|
|
|
@ -20,6 +20,8 @@ import (
|
|||
"github.com/hashicorp/terraform/tfdiags"
|
||||
)
|
||||
|
||||
var planConfigurationVersionsPollInterval = 500 * time.Millisecond
|
||||
|
||||
func (b *Remote) opPlan(stopCtx, cancelCtx context.Context, op *backend.Operation, w *tfe.Workspace) (*tfe.Run, error) {
|
||||
log.Printf("[INFO] backend/remote: starting Plan operation")
|
||||
|
||||
|
@ -213,7 +215,7 @@ in order to capture the filesystem context the remote workspace expects:
|
|||
return nil, context.Canceled
|
||||
case <-cancelCtx.Done():
|
||||
return nil, context.Canceled
|
||||
case <-time.After(500 * time.Millisecond):
|
||||
case <-time.After(planConfigurationVersionsPollInterval):
|
||||
cv, err = b.client.ConfigurationVersions.Read(stopCtx, cv.ID)
|
||||
if err != nil {
|
||||
return nil, generalError("Failed to retrieve configuration version", err)
|
||||
|
|
|
@ -620,7 +620,7 @@ func TestRemote_planLockTimeout(t *testing.T) {
|
|||
"approve": "yes",
|
||||
})
|
||||
|
||||
op.StateLockTimeout = 5 * time.Second
|
||||
op.StateLockTimeout = 50 * time.Millisecond
|
||||
op.UIIn = input
|
||||
op.UIOut = b.CLI
|
||||
op.Workspace = backend.DefaultStateName
|
||||
|
@ -636,8 +636,8 @@ func TestRemote_planLockTimeout(t *testing.T) {
|
|||
case <-sigint:
|
||||
// Stop redirecting SIGINT signals.
|
||||
signal.Stop(sigint)
|
||||
case <-time.After(10 * time.Second):
|
||||
t.Fatalf("expected lock timeout after 5 seconds, waited 10 seconds")
|
||||
case <-time.After(200 * time.Millisecond):
|
||||
t.Fatalf("expected lock timeout after 50 milliseconds, waited 200 milliseconds")
|
||||
}
|
||||
|
||||
if len(input.answers) != 2 {
|
||||
|
|
|
@ -1,13 +1,18 @@
|
|||
package remote
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
tfe "github.com/hashicorp/go-tfe"
|
||||
version "github.com/hashicorp/go-version"
|
||||
"github.com/hashicorp/terraform-svchost/disco"
|
||||
"github.com/hashicorp/terraform/backend"
|
||||
"github.com/hashicorp/terraform/version"
|
||||
"github.com/hashicorp/terraform/tfdiags"
|
||||
tfversion "github.com/hashicorp/terraform/version"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
|
||||
backendLocal "github.com/hashicorp/terraform/backend/local"
|
||||
|
@ -196,11 +201,11 @@ func TestRemote_versionConstraints(t *testing.T) {
|
|||
}
|
||||
|
||||
// Save and restore the actual version.
|
||||
p := version.Prerelease
|
||||
v := version.Version
|
||||
p := tfversion.Prerelease
|
||||
v := tfversion.Version
|
||||
defer func() {
|
||||
version.Prerelease = p
|
||||
version.Version = v
|
||||
tfversion.Prerelease = p
|
||||
tfversion.Version = v
|
||||
}()
|
||||
|
||||
for name, tc := range cases {
|
||||
|
@ -208,8 +213,8 @@ func TestRemote_versionConstraints(t *testing.T) {
|
|||
b := New(testDisco(s))
|
||||
|
||||
// Set the version for this test.
|
||||
version.Prerelease = tc.prerelease
|
||||
version.Version = tc.version
|
||||
tfversion.Prerelease = tc.prerelease
|
||||
tfversion.Version = tc.version
|
||||
|
||||
// Validate
|
||||
_, valDiags := b.PrepareConfig(tc.config)
|
||||
|
@ -428,17 +433,17 @@ func TestRemote_checkConstraints(t *testing.T) {
|
|||
}
|
||||
|
||||
// Save and restore the actual version.
|
||||
p := version.Prerelease
|
||||
v := version.Version
|
||||
p := tfversion.Prerelease
|
||||
v := tfversion.Version
|
||||
defer func() {
|
||||
version.Prerelease = p
|
||||
version.Version = v
|
||||
tfversion.Prerelease = p
|
||||
tfversion.Version = v
|
||||
}()
|
||||
|
||||
for name, tc := range cases {
|
||||
// Set the version for this test.
|
||||
version.Prerelease = tc.prerelease
|
||||
version.Version = tc.version
|
||||
tfversion.Prerelease = tc.prerelease
|
||||
tfversion.Version = tc.version
|
||||
|
||||
// Check the constraints.
|
||||
diags := b.checkConstraints(tc.constraints)
|
||||
|
@ -448,3 +453,222 @@ func TestRemote_checkConstraints(t *testing.T) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemote_StateMgr_versionCheck(t *testing.T) {
|
||||
b, bCleanup := testBackendDefault(t)
|
||||
defer bCleanup()
|
||||
|
||||
// Some fixed versions for testing with. This logic is a simple string
|
||||
// comparison, so we don't need many test cases.
|
||||
v0135 := version.Must(version.NewSemver("0.13.5"))
|
||||
v0140 := version.Must(version.NewSemver("0.14.0"))
|
||||
|
||||
// Save original local version state and restore afterwards
|
||||
p := tfversion.Prerelease
|
||||
v := tfversion.Version
|
||||
s := tfversion.SemVer
|
||||
defer func() {
|
||||
tfversion.Prerelease = p
|
||||
tfversion.Version = v
|
||||
tfversion.SemVer = s
|
||||
}()
|
||||
|
||||
// For this test, the local Terraform version is set to 0.14.0
|
||||
tfversion.Prerelease = ""
|
||||
tfversion.Version = v0140.String()
|
||||
tfversion.SemVer = v0140
|
||||
|
||||
// Update the mock remote workspace Terraform version to match the local
|
||||
// Terraform version
|
||||
if _, err := b.client.Workspaces.Update(
|
||||
context.Background(),
|
||||
b.organization,
|
||||
b.workspace,
|
||||
tfe.WorkspaceUpdateOptions{
|
||||
TerraformVersion: tfe.String(v0140.String()),
|
||||
},
|
||||
); err != nil {
|
||||
t.Fatalf("error: %v", err)
|
||||
}
|
||||
|
||||
// This should succeed
|
||||
if _, err := b.StateMgr(backend.DefaultStateName); err != nil {
|
||||
t.Fatalf("expected no error, got %v", err)
|
||||
}
|
||||
|
||||
// Now change the remote workspace to a different Terraform version
|
||||
if _, err := b.client.Workspaces.Update(
|
||||
context.Background(),
|
||||
b.organization,
|
||||
b.workspace,
|
||||
tfe.WorkspaceUpdateOptions{
|
||||
TerraformVersion: tfe.String(v0135.String()),
|
||||
},
|
||||
); err != nil {
|
||||
t.Fatalf("error: %v", err)
|
||||
}
|
||||
|
||||
// This should fail
|
||||
want := `Remote workspace Terraform version "0.13.5" does not match local Terraform version "0.14.0"`
|
||||
if _, err := b.StateMgr(backend.DefaultStateName); err.Error() != want {
|
||||
t.Fatalf("wrong error\n got: %v\nwant: %v", err.Error(), want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemote_VerifyWorkspaceTerraformVersion(t *testing.T) {
|
||||
testCases := []struct {
|
||||
local string
|
||||
remote string
|
||||
wantErr bool
|
||||
}{
|
||||
{"0.13.5", "0.13.5", false},
|
||||
{"0.14.0", "0.13.5", true},
|
||||
{"0.14.0", "0.14.1", false},
|
||||
{"0.14.0", "1.0.99", false},
|
||||
{"0.14.0", "1.1.0", true},
|
||||
{"1.2.0", "1.2.99", false},
|
||||
{"1.2.0", "1.3.0", true},
|
||||
}
|
||||
for _, tc := range testCases {
|
||||
t.Run(fmt.Sprintf("local %s, remote %s", tc.local, tc.remote), func(t *testing.T) {
|
||||
b, bCleanup := testBackendDefault(t)
|
||||
defer bCleanup()
|
||||
|
||||
local := version.Must(version.NewSemver(tc.local))
|
||||
remote := version.Must(version.NewSemver(tc.remote))
|
||||
|
||||
// Save original local version state and restore afterwards
|
||||
p := tfversion.Prerelease
|
||||
v := tfversion.Version
|
||||
s := tfversion.SemVer
|
||||
defer func() {
|
||||
tfversion.Prerelease = p
|
||||
tfversion.Version = v
|
||||
tfversion.SemVer = s
|
||||
}()
|
||||
|
||||
// Override local version as specified
|
||||
tfversion.Prerelease = ""
|
||||
tfversion.Version = local.String()
|
||||
tfversion.SemVer = local
|
||||
|
||||
// Update the mock remote workspace Terraform version to the
|
||||
// specified remote version
|
||||
if _, err := b.client.Workspaces.Update(
|
||||
context.Background(),
|
||||
b.organization,
|
||||
b.workspace,
|
||||
tfe.WorkspaceUpdateOptions{
|
||||
TerraformVersion: tfe.String(remote.String()),
|
||||
},
|
||||
); err != nil {
|
||||
t.Fatalf("error: %v", err)
|
||||
}
|
||||
|
||||
diags := b.VerifyWorkspaceTerraformVersion(backend.DefaultStateName)
|
||||
if tc.wantErr {
|
||||
if len(diags) != 1 {
|
||||
t.Fatal("expected diag, but none returned")
|
||||
}
|
||||
if got := diags.Err().Error(); !strings.Contains(got, "Terraform version mismatch") {
|
||||
t.Fatalf("unexpected error: %s", got)
|
||||
}
|
||||
} else {
|
||||
if len(diags) != 0 {
|
||||
t.Fatalf("unexpected diags: %s", diags.Err())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemote_VerifyWorkspaceTerraformVersion_workspaceErrors(t *testing.T) {
|
||||
b, bCleanup := testBackendDefault(t)
|
||||
defer bCleanup()
|
||||
|
||||
// Attempting to check the version against a workspace which doesn't exist
|
||||
// should fail
|
||||
diags := b.VerifyWorkspaceTerraformVersion("invalid-workspace")
|
||||
if len(diags) != 1 {
|
||||
t.Fatal("expected diag, but none returned")
|
||||
}
|
||||
if got := diags.Err().Error(); !strings.Contains(got, "Error looking up workspace: Workspace read failed") {
|
||||
t.Fatalf("unexpected error: %s", got)
|
||||
}
|
||||
|
||||
// Update the mock remote workspace Terraform version to an invalid version
|
||||
if _, err := b.client.Workspaces.Update(
|
||||
context.Background(),
|
||||
b.organization,
|
||||
b.workspace,
|
||||
tfe.WorkspaceUpdateOptions{
|
||||
TerraformVersion: tfe.String("1.0.cheetarah"),
|
||||
},
|
||||
); err != nil {
|
||||
t.Fatalf("error: %v", err)
|
||||
}
|
||||
diags = b.VerifyWorkspaceTerraformVersion(backend.DefaultStateName)
|
||||
|
||||
if len(diags) != 1 {
|
||||
t.Fatal("expected diag, but none returned")
|
||||
}
|
||||
if got := diags.Err().Error(); !strings.Contains(got, "Error looking up workspace: Invalid Terraform version") {
|
||||
t.Fatalf("unexpected error: %s", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemote_VerifyWorkspaceTerraformVersion_ignoreFlagSet(t *testing.T) {
|
||||
b, bCleanup := testBackendDefault(t)
|
||||
defer bCleanup()
|
||||
|
||||
// If the ignore flag is set, the behaviour changes
|
||||
b.IgnoreVersionConflict()
|
||||
|
||||
// Different local & remote versions to cause an error
|
||||
local := version.Must(version.NewSemver("0.14.0"))
|
||||
remote := version.Must(version.NewSemver("0.13.5"))
|
||||
|
||||
// Save original local version state and restore afterwards
|
||||
p := tfversion.Prerelease
|
||||
v := tfversion.Version
|
||||
s := tfversion.SemVer
|
||||
defer func() {
|
||||
tfversion.Prerelease = p
|
||||
tfversion.Version = v
|
||||
tfversion.SemVer = s
|
||||
}()
|
||||
|
||||
// Override local version as specified
|
||||
tfversion.Prerelease = ""
|
||||
tfversion.Version = local.String()
|
||||
tfversion.SemVer = local
|
||||
|
||||
// Update the mock remote workspace Terraform version to the
|
||||
// specified remote version
|
||||
if _, err := b.client.Workspaces.Update(
|
||||
context.Background(),
|
||||
b.organization,
|
||||
b.workspace,
|
||||
tfe.WorkspaceUpdateOptions{
|
||||
TerraformVersion: tfe.String(remote.String()),
|
||||
},
|
||||
); err != nil {
|
||||
t.Fatalf("error: %v", err)
|
||||
}
|
||||
|
||||
diags := b.VerifyWorkspaceTerraformVersion(backend.DefaultStateName)
|
||||
if len(diags) != 1 {
|
||||
t.Fatal("expected diag, but none returned")
|
||||
}
|
||||
|
||||
if got, want := diags[0].Severity(), tfdiags.Warning; got != want {
|
||||
t.Errorf("wrong severity: got %#v, want %#v", got, want)
|
||||
}
|
||||
if got, want := diags[0].Description().Summary, "Terraform version mismatch"; got != want {
|
||||
t.Errorf("wrong summary: got %s, want %s", got, want)
|
||||
}
|
||||
wantDetail := "The local Terraform version (0.14.0) does not match the configured version for remote workspace hashicorp/prod (0.13.5)."
|
||||
if got := diags[0].Description().Detail; got != wantDetail {
|
||||
t.Errorf("wrong summary: got %s, want %s", got, wantDetail)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -4,6 +4,7 @@ import (
 	"flag"
 	"os"
 	"testing"
+	"time"

 	_ "github.com/hashicorp/terraform/internal/logging"
 )

@@ -14,5 +15,11 @@ func TestMain(m *testing.M) {
 	// Make sure TF_FORCE_LOCAL_BACKEND is unset
 	os.Unsetenv("TF_FORCE_LOCAL_BACKEND")

+	// Reduce delays to make tests run faster
+	backoffMin = 1.0
+	backoffMax = 1.0
+	planConfigurationVersionsPollInterval = 1 * time.Millisecond
+	runPollInterval = 1 * time.Millisecond
+
 	os.Exit(m.Run())
 }

@ -1,15 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/test"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: func() terraform.ResourceProvider {
|
||||
return test.Provider()
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/chef"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: chef.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/file"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: file.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/habitat"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: habitat.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/local-exec"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: localexec.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/puppet"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: puppet.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/remote-exec"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: remoteexec.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/provisioners/salt-masterless"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProvisionerFunc: saltmasterless.Provisioner,
|
||||
})
|
||||
}
|
|
@ -1,76 +0,0 @@
|
|||
package terraform
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// remoteStateFlatten takes a structure and turns into a flat map[string]string.
|
||||
//
|
||||
// Within the "thing" parameter, only primitive values are allowed. Structs are
|
||||
// not supported. Therefore, it can only be slices, maps, primitives, and
|
||||
// any combination of those together.
|
||||
//
|
||||
// The difference between this version and the version in package flatmap is that
|
||||
// we add the count key for maps in this version, and return a normal
|
||||
// map[string]string instead of a flatmap.Map
|
||||
func remoteStateFlatten(thing map[string]interface{}) map[string]string {
|
||||
result := make(map[string]string)
|
||||
|
||||
for k, raw := range thing {
|
||||
flatten(result, k, reflect.ValueOf(raw))
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func flatten(result map[string]string, prefix string, v reflect.Value) {
|
||||
if v.Kind() == reflect.Interface {
|
||||
v = v.Elem()
|
||||
}
|
||||
|
||||
switch v.Kind() {
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
result[prefix] = "true"
|
||||
} else {
|
||||
result[prefix] = "false"
|
||||
}
|
||||
case reflect.Int:
|
||||
result[prefix] = fmt.Sprintf("%d", v.Int())
|
||||
case reflect.Map:
|
||||
flattenMap(result, prefix, v)
|
||||
case reflect.Slice:
|
||||
flattenSlice(result, prefix, v)
|
||||
case reflect.String:
|
||||
result[prefix] = v.String()
|
||||
default:
|
||||
panic(fmt.Sprintf("Unknown: %s", v))
|
||||
}
|
||||
}
|
||||
|
||||
func flattenMap(result map[string]string, prefix string, v reflect.Value) {
|
||||
mapKeys := v.MapKeys()
|
||||
|
||||
result[fmt.Sprintf("%s.%%", prefix)] = fmt.Sprintf("%d", len(mapKeys))
|
||||
for _, k := range mapKeys {
|
||||
if k.Kind() == reflect.Interface {
|
||||
k = k.Elem()
|
||||
}
|
||||
|
||||
if k.Kind() != reflect.String {
|
||||
panic(fmt.Sprintf("%s: map key is not string: %s", prefix, k))
|
||||
}
|
||||
|
||||
flatten(result, fmt.Sprintf("%s.%s", prefix, k.String()), v.MapIndex(k))
|
||||
}
|
||||
}
|
||||
|
||||
func flattenSlice(result map[string]string, prefix string, v reflect.Value) {
|
||||
prefix = prefix + "."
|
||||
|
||||
result[prefix+"#"] = fmt.Sprintf("%d", v.Len())
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
flatten(result, fmt.Sprintf("%s%d", prefix, i), v.Index(i))
|
||||
}
|
||||
}
|
|
@ -17,7 +17,7 @@ type Provider struct {
|
|||
}
|
||||
|
||||
// NewProvider returns a new terraform provider
|
||||
func NewProvider() *Provider {
|
||||
func NewProvider() providers.Interface {
|
||||
return &Provider{}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,29 +1,10 @@
package terraform

import (
  "testing"

  "github.com/hashicorp/terraform/providers"

  backendInit "github.com/hashicorp/terraform/backend/init"
)

var testAccProviders map[string]*Provider
var testAccProvider *Provider

func init() {
  // Initialize the backends
  backendInit.Init(nil)

  testAccProvider = NewProvider()
  testAccProviders = map[string]*Provider{
    "terraform": testAccProvider,
  }
}

func TestProvider_impl(t *testing.T) {
  var _ providers.Interface = NewProvider()
}

func testAccPreCheck(t *testing.T) {
}

@ -1,63 +0,0 @@
package test

import (
  "time"

  "github.com/hashicorp/terraform/helper/schema"
)

func testDataSource() *schema.Resource {
  return &schema.Resource{
    Read: testDataSourceRead,

    Schema: map[string]*schema.Schema{
      "list": {
        Type: schema.TypeList,
        Computed: true,
        Elem: &schema.Schema{Type: schema.TypeString},
      },

      "input": {
        Type: schema.TypeString,
        Optional: true,
      },

      "output": {
        Type: schema.TypeString,
        Computed: true,
      },
      // this attribute is computed, but never set by the provider
      "nil": {
        Type: schema.TypeString,
        Computed: true,
      },

      "input_map": {
        Type: schema.TypeMap,
        Elem: &schema.Schema{Type: schema.TypeString},
        Optional: true,
      },
      "output_map": {
        Type: schema.TypeMap,
        Elem: &schema.Schema{Type: schema.TypeString},
        Computed: true,
      },
    },
  }
}

func testDataSourceRead(d *schema.ResourceData, meta interface{}) error {
  d.SetId(time.Now().UTC().String())
  d.Set("list", []interface{}{"one", "two", "three"})

  if input, hasInput := d.GetOk("input"); hasInput {
    d.Set("output", input)
  } else {
    d.Set("output", "some output")
  }

  if inputMap, hasInput := d.GetOk("input_map"); hasInput {
    d.Set("output_map", inputMap)
  }
  return nil
}

@ -1,25 +0,0 @@
package test

import (
  "github.com/hashicorp/terraform/helper/schema"
)

func providerLabelDataSource() *schema.Resource {
  return &schema.Resource{
    Read: providerLabelDataSourceRead,

    Schema: map[string]*schema.Schema{
      "label": {
        Type: schema.TypeString,
        Computed: true,
      },
    },
  }
}

func providerLabelDataSourceRead(d *schema.ResourceData, meta interface{}) error {
  label := meta.(string)
  d.SetId(label)
  d.Set("label", label)
  return nil
}

@ -1,45 +0,0 @@
package test

import (
  "errors"
  "fmt"
  "strings"
  "testing"

  "github.com/hashicorp/terraform/helper/resource"
  "github.com/hashicorp/terraform/terraform"
)

func TestProviderLabelDataSource(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: strings.TrimSpace(`
provider "test" {
  label = "foo"
}

data "test_provider_label" "test" {
}
`),
        Check: func(s *terraform.State) error {
          res, hasRes := s.RootModule().Resources["data.test_provider_label.test"]
          if !hasRes {
            return errors.New("No test_provider_label in state")
          }
          if got, want := res.Primary.ID, "foo"; got != want {
            return fmt.Errorf("wrong id %q; want %q", got, want)
          }
          if got, want := res.Primary.Attributes["label"], "foo"; got != want {
            return fmt.Errorf("wrong id %q; want %q", got, want)
          }
          return nil
        },
      },
    },
  })
}

@ -1,291 +0,0 @@
package test

import (
  "errors"
  "fmt"
  "regexp"
  "strings"
  "testing"

  "github.com/hashicorp/terraform/helper/resource"
  "github.com/hashicorp/terraform/terraform"
)

func TestDataSource_dataSourceCount(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: strings.TrimSpace(`
data "test_data_source" "test" {
  count = 3
  input = "count-${count.index}"
}

resource "test_resource" "foo" {
  required = "yep"
  required_map = {
    key = "value"
  }

  list = "${data.test_data_source.test.*.output}"
}
`),
        Check: func(s *terraform.State) error {
          res, hasRes := s.RootModule().Resources["test_resource.foo"]
          if !hasRes {
            return errors.New("No test_resource.foo in state")
          }
          if res.Primary.Attributes["list.#"] != "3" {
            return errors.New("Wrong list.#, expected 3")
          }
          if res.Primary.Attributes["list.0"] != "count-0" {
            return errors.New("Wrong list.0, expected count-0")
          }
          if res.Primary.Attributes["list.1"] != "count-1" {
            return errors.New("Wrong list.0, expected count-1")
          }
          if res.Primary.Attributes["list.2"] != "count-2" {
            return errors.New("Wrong list.0, expected count-2")
          }
          return nil
        },
      },
    },
  })
}

// Test that the output of a data source can be used as the value for
// a "count" in a real resource. This would fail with "count cannot be computed"
// at some point.
func TestDataSource_valueAsResourceCount(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: strings.TrimSpace(`
data "test_data_source" "test" {
  input = "4"
}

resource "test_resource" "foo" {
  count = "${data.test_data_source.test.output}"

  required = "yep"
  required_map = {
    key = "value"
  }
}
`),
        Check: func(s *terraform.State) error {
          count := 0
          for k, _ := range s.RootModule().Resources {
            if strings.HasPrefix(k, "test_resource.foo.") {
              count++
            }
          }

          if count != 4 {
            return fmt.Errorf("bad count: %d", count)
          }
          return nil
        },
      },
    },
  })
}

// TestDataSource_dataSourceCountGrandChild tests that a grandchild data source
// that is based off of count works, ie: dependency chain foo -> bar -> baz.
// This was failing because CountBoundaryTransformer is being run during apply
// instead of plan, which meant that it wasn't firing after data sources were
// potentially changing state and causing diff/interpolation issues.
//
// This happens after the initial apply, after state is saved.
func TestDataSource_dataSourceCountGrandChild(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: dataSourceCountGrandChildConfig,
      },
      {
        Config: dataSourceCountGrandChildConfig,
        Check: func(s *terraform.State) error {
          for _, v := range []string{"foo", "bar", "baz"} {
            count := 0
            for k := range s.RootModule().Resources {
              if strings.HasPrefix(k, fmt.Sprintf("data.test_data_source.%s.", v)) {
                count++
              }
            }

            if count != 2 {
              return fmt.Errorf("bad count for data.test_data_source.%s: %d", v, count)
            }
          }
          return nil
        },
      },
    },
  })
}

const dataSourceCountGrandChildConfig = `
data "test_data_source" "foo" {
  count = 2
  input = "one"
}

data "test_data_source" "bar" {
  count = "${length(data.test_data_source.foo.*.id)}"
  input = "${data.test_data_source.foo.*.output[count.index]}"
}

data "test_data_source" "baz" {
  count = "${length(data.test_data_source.bar.*.id)}"
  input = "${data.test_data_source.bar.*.output[count.index]}"
}
`

func TestDataSource_nilComputedValues(t *testing.T) {
  check := func(s *terraform.State) error {
    return nil
  }

  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    Steps: []resource.TestStep{
      {
        Check: check,
        Config: `
variable "index" {
  default = "d"
}

locals {
  name = {
    a = "something"
    b = "else"
  }
}

data "test_data_source" "x" {
  input = "${lookup(local.name, var.index, local.name["a"])}"
}

data "test_data_source" "y" {
  input = data.test_data_source.x.nil == "something" ? "something" : "else"
}`,
      },
    },
  })
}

// referencing test_data_source.one.output_map["a"] should produce an error when
// there's a count.
func TestDataSource_indexedCountOfOne(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    Steps: []resource.TestStep{
      {
        Config: strings.TrimSpace(`
data "test_data_source" "one" {
  count = 1
  input_map = {
    "a" = "b"
  }
}

data "test_data_source" "two" {
  input_map = {
    "x" = data.test_data_source.one.output_map["a"]
  }
}
`),
        ExpectError: regexp.MustCompile("Because data.test_data_source.one has \"count\" set, its attributes must be accessed on specific instances"),
      },
    },
  })
}

// Verify that we can destroy when a data source references something with a
// count of 1.
func TestDataSource_countRefDestroyError(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    Steps: []resource.TestStep{
      {
        Config: strings.TrimSpace(`
data "test_data_source" "one" {
  count = 1
  input = "a"
}

data "test_data_source" "two" {
  input = data.test_data_source.one[0].output
}
`),
      },
    },
  })
}

func TestDataSource_planUpdate(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    Steps: []resource.TestStep{
      {
        Config: strings.TrimSpace(`
resource "test_resource" "a" {
  required = "first"
  required_map = {
    key = "1"
  }
  optional_force_new = "first"
}

data "test_data_source" "a" {
  input = "${test_resource.a.computed_from_required}"
}

output "out" {
  value = "${data.test_data_source.a.output}"
}
`),
      },
      {
        Config: strings.TrimSpace(`
resource "test_resource" "a" {
  required = "second"
  required_map = {
    key = "1"
  }
  optional_force_new = "second"
}

data "test_data_source" "a" {
  input = "${test_resource.a.computed_from_required}"
}

output "out" {
  value = "${data.test_data_source.a.output}"
}
`),
        Check: resource.ComposeAggregateTestCheckFunc(
          resource.TestCheckResourceAttr("data.test_data_source.a", "output", "second"),
          resource.TestCheckOutput("out", "second"),
        ),
      },
    },
  })
}

@ -1,144 +0,0 @@
package test

import (
  "reflect"
  "testing"

  "github.com/davecgh/go-spew/spew"
  "github.com/hashicorp/terraform/helper/schema"
  "github.com/hashicorp/terraform/terraform"
)

func TestDiffApply_set(t *testing.T) {
  priorAttrs := map[string]string{
    "id": "testID",
    "egress.#": "1",
    "egress.2129912301.cidr_blocks.#": "1",
    "egress.2129912301.cidr_blocks.0": "10.0.0.0/8",
    "egress.2129912301.description": "Egress description",
    "egress.2129912301.from_port": "80",
    "egress.2129912301.ipv6_cidr_blocks.#": "0",
    "egress.2129912301.prefix_list_ids.#": "0",
    "egress.2129912301.protocol": "tcp",
    "egress.2129912301.security_groups.#": "0",
    "egress.2129912301.self": "false",
    "egress.2129912301.to_port": "8000",
  }

  diff := &terraform.InstanceDiff{
    Attributes: map[string]*terraform.ResourceAttrDiff{
      "egress.2129912301.cidr_blocks.#": {Old: "1", New: "0", NewComputed: false, NewRemoved: false},
      "egress.2129912301.cidr_blocks.0": {Old: "10.0.0.0/8", New: "", NewComputed: false, NewRemoved: true},
      "egress.2129912301.description": {Old: "Egress description", New: "", NewComputed: false, NewRemoved: true},
      "egress.2129912301.from_port": {Old: "80", New: "0", NewComputed: false, NewRemoved: true},
      "egress.2129912301.ipv6_cidr_blocks.#": {Old: "0", New: "0", NewComputed: false, NewRemoved: false},
      "egress.2129912301.prefix_list_ids.#": {Old: "0", New: "0", NewComputed: false, NewRemoved: false},
      "egress.2129912301.protocol": {Old: "tcp", New: "", NewComputed: false, NewRemoved: true},
      "egress.2129912301.security_groups.#": {Old: "0", New: "0", NewComputed: false, NewRemoved: false},
      "egress.2129912301.self": {Old: "false", New: "false", NewComputed: false, NewRemoved: true},
      "egress.2129912301.to_port": {Old: "8000", New: "0", NewComputed: false, NewRemoved: true},
      "egress.746197026.cidr_blocks.#": {Old: "", New: "1", NewComputed: false, NewRemoved: false},
      "egress.746197026.cidr_blocks.0": {Old: "", New: "10.0.0.0/8", NewComputed: false, NewRemoved: false},
      "egress.746197026.description": {Old: "", New: "New egress description", NewComputed: false, NewRemoved: false},
      "egress.746197026.from_port": {Old: "", New: "80", NewComputed: false, NewRemoved: false},
      "egress.746197026.ipv6_cidr_blocks.#": {Old: "", New: "0", NewComputed: false, NewRemoved: false},
      "egress.746197026.prefix_list_ids.#": {Old: "", New: "0", NewComputed: false, NewRemoved: false},
      "egress.746197026.protocol": {Old: "", New: "tcp", NewComputed: false, NewRemoved: false, NewExtra: "tcp"},
      "egress.746197026.security_groups.#": {Old: "", New: "0", NewComputed: false, NewRemoved: false},
      "egress.746197026.self": {Old: "", New: "false", NewComputed: false, NewRemoved: false},
      "egress.746197026.to_port": {Old: "", New: "8000", NewComputed: false, NewRemoved: false},
      // an erroneous nil diff should do nothing
      "egress.111111111.to_port": nil,
    },
  }

  resSchema := map[string]*schema.Schema{
    "egress": {
      Type: schema.TypeSet,
      Optional: true,
      Computed: true,
      ConfigMode: schema.SchemaConfigModeAttr,
      Elem: &schema.Resource{
        Schema: map[string]*schema.Schema{
          "from_port": {
            Type: schema.TypeInt,
            Required: true,
          },

          "to_port": {
            Type: schema.TypeInt,
            Required: true,
          },

          "protocol": {
            Type: schema.TypeString,
            Required: true,
          },

          "cidr_blocks": {
            Type: schema.TypeList,
            Optional: true,
            Elem: &schema.Schema{
              Type: schema.TypeString,
            },
          },

          "ipv6_cidr_blocks": {
            Type: schema.TypeList,
            Optional: true,
            Elem: &schema.Schema{
              Type: schema.TypeString,
            },
          },

          "prefix_list_ids": {
            Type: schema.TypeList,
            Optional: true,
            Elem: &schema.Schema{Type: schema.TypeString},
          },

          "security_groups": {
            Type: schema.TypeSet,
            Optional: true,
            Elem: &schema.Schema{Type: schema.TypeString},
            Set: schema.HashString,
          },

          "self": {
            Type: schema.TypeBool,
            Optional: true,
            Default: false,
          },

          "description": {
            Type: schema.TypeString,
            Optional: true,
          },
        },
      },
    },
  }

  expected := map[string]string{
    "egress.#": "1",
    "egress.746197026.cidr_blocks.#": "1",
    "egress.746197026.cidr_blocks.0": "10.0.0.0/8",
    "egress.746197026.description": "New egress description",
    "egress.746197026.from_port": "80", "egress.746197026.ipv6_cidr_blocks.#": "0",
    "egress.746197026.prefix_list_ids.#": "0",
    "egress.746197026.protocol": "tcp",
    "egress.746197026.security_groups.#": "0",
    "egress.746197026.self": "false",
    "egress.746197026.to_port": "8000",
    "id": "testID",
  }

  attrs, err := diff.Apply(priorAttrs, (&schema.Resource{Schema: resSchema}).CoreConfigSchema())
  if err != nil {
    t.Fatal(err)
  }

  if !reflect.DeepEqual(attrs, expected) {
    t.Fatalf("wrong result\ngot: %s\nwant: %s\n", spew.Sdump(attrs), spew.Sdump(expected))
  }
}

@ -1,59 +0,0 @@
package test

import (
  "github.com/hashicorp/terraform/helper/schema"
  "github.com/hashicorp/terraform/terraform"
)

func Provider() terraform.ResourceProvider {
  return &schema.Provider{
    Schema: map[string]*schema.Schema{
      // Optional attribute to label a particular instance for a test
      // that has multiple instances of this provider, so that they
      // can be distinguished using the test_provider_label data source.
      "label": {
        Type: schema.TypeString,
        Optional: true,
      },
    },
    ProviderMetaSchema: map[string]*schema.Schema{
      // Optionally allow specifying information at a module-level
      "foo": {
        Type: schema.TypeString,
        Optional: true,
      },
    },
    ResourcesMap: map[string]*schema.Resource{
      "test_resource": testResource(),
      "test_resource_gh12183": testResourceGH12183(),
      "test_resource_with_custom_diff": testResourceCustomDiff(),
      "test_resource_timeout": testResourceTimeout(),
      "test_resource_diff_suppress": testResourceDiffSuppress(),
      "test_resource_force_new": testResourceForceNew(),
      "test_resource_nested": testResourceNested(),
      "test_resource_nested_set": testResourceNestedSet(),
      "test_resource_state_func": testResourceStateFunc(),
      "test_resource_deprecated": testResourceDeprecated(),
      "test_resource_defaults": testResourceDefaults(),
      "test_resource_list": testResourceList(),
      "test_resource_list_set": testResourceListSet(),
      "test_resource_map": testResourceMap(),
      "test_resource_computed_set": testResourceComputedSet(),
      "test_resource_config_mode": testResourceConfigMode(),
      "test_resource_nested_id": testResourceNestedId(),
      "test_resource_provider_meta": testResourceProviderMeta(),
      "test_resource_signal": testResourceSignal(),
      "test_undeleteable": testResourceUndeleteable(),
      "test_resource_required_min": testResourceRequiredMin(),
    },
    DataSourcesMap: map[string]*schema.Resource{
      "test_data_source": testDataSource(),
      "test_provider_label": providerLabelDataSource(),
    },
    ConfigureFunc: providerConfigure,
  }
}

func providerConfigure(d *schema.ResourceData) (interface{}, error) {
  return d.Get("label"), nil
}

@ -1,24 +0,0 @@
package test

import (
  "testing"

  "github.com/hashicorp/terraform/helper/schema"
  "github.com/hashicorp/terraform/terraform"
)

var testAccProviders map[string]terraform.ResourceProvider
var testAccProvider *schema.Provider

func TestProvider(t *testing.T) {
  if err := Provider().(*schema.Provider).InternalValidate(); err != nil {
    t.Fatalf("err: %s", err)
  }
}

func init() {
  testAccProvider = Provider().(*schema.Provider)
  testAccProviders = map[string]terraform.ResourceProvider{
    "test": testAccProvider,
  }
}

@ -1,233 +0,0 @@
package test

import (
  "errors"
  "fmt"

  "github.com/hashicorp/terraform/helper/schema"
)

func testResource() *schema.Resource {
  return &schema.Resource{
    Create: testResourceCreate,
    Read: testResourceRead,
    Update: testResourceUpdate,
    Delete: testResourceDelete,

    Importer: &schema.ResourceImporter{
      State: schema.ImportStatePassthrough,
    },

    CustomizeDiff: func(d *schema.ResourceDiff, _ interface{}) error {
      if d.HasChange("optional") {
        d.SetNewComputed("planned_computed")
      }
      return nil
    },

    Schema: map[string]*schema.Schema{
      "required": {
        Type: schema.TypeString,
        Required: true,
      },
      "optional": {
        Type: schema.TypeString,
        Optional: true,
      },
      "optional_bool": {
        Type: schema.TypeBool,
        Optional: true,
      },
      "optional_force_new": {
        Type: schema.TypeString,
        Optional: true,
        ForceNew: true,
      },
      "optional_computed_map": {
        Type: schema.TypeMap,
        Optional: true,
        Computed: true,
      },
      "optional_computed_force_new": {
        Type: schema.TypeString,
        Optional: true,
        Computed: true,
        ForceNew: true,
      },
      "optional_computed": {
        Type: schema.TypeString,
        Optional: true,
        Computed: true,
      },
      "computed_read_only": {
        Type: schema.TypeString,
        Computed: true,
      },
      "computed_from_required": {
        Type: schema.TypeString,
        Computed: true,
        ForceNew: true,
      },
      "computed_read_only_force_new": {
        Type: schema.TypeString,
        Computed: true,
        ForceNew: true,
      },
      "computed_list": {
        Type: schema.TypeList,
        Computed: true,
        Elem: &schema.Schema{
          Type: schema.TypeString,
        },
      },
      "set": {
        Type: schema.TypeSet,
        Optional: true,
        Elem: &schema.Schema{
          Type: schema.TypeString,
        },
        Set: schema.HashString,
      },
      "computed_set": {
        Type: schema.TypeSet,
        Computed: true,
        Elem: &schema.Schema{
          Type: schema.TypeString,
        },
        Set: schema.HashString,
      },
      "map": {
        Type: schema.TypeMap,
        Optional: true,
      },
      "optional_map": {
        Type: schema.TypeMap,
        Optional: true,
      },
      "required_map": {
        Type: schema.TypeMap,
        Required: true,
      },
      "map_that_look_like_set": {
        Type: schema.TypeMap,
        Optional: true,
        Elem: &schema.Schema{
          Type: schema.TypeString,
        },
      },
      "computed_map": {
        Type: schema.TypeMap,
        Computed: true,
      },
      "list": {
        Type: schema.TypeList,
        Optional: true,
        Elem: &schema.Schema{
          Type: schema.TypeString,
        },
      },
      "list_of_map": {
        Type: schema.TypeList,
        Optional: true,
        Elem: &schema.Schema{
          Type: schema.TypeMap,
          Elem: &schema.Schema{
            Type: schema.TypeString,
          },
        },
      },
      "apply_error": {
        Type: schema.TypeString,
        Optional: true,
        Description: "return and error during apply",
      },
      "planned_computed": {
        Type: schema.TypeString,
        Computed: true,
        Description: "copied the required field during apply, and plans computed when changed",
      },
      // this should return unset from GetOkExists
      "get_ok_exists_false": {
        Type: schema.TypeBool,
        Computed: true,
        Optional: true,
        Description: "do not set in config",
      },
      "int": {
        Type: schema.TypeInt,
        Optional: true,
      },
    },
  }
}

func testResourceCreate(d *schema.ResourceData, meta interface{}) error {
  d.SetId("testId")

  errMsg, _ := d.Get("apply_error").(string)
  if errMsg != "" {
    return errors.New(errMsg)
  }

  // Required must make it through to Create
  if _, ok := d.GetOk("required"); !ok {
    return fmt.Errorf("Missing attribute 'required', but it's required!")
  }
  if _, ok := d.GetOk("required_map"); !ok {
    return fmt.Errorf("Missing attribute 'required_map', but it's required!")
  }

  d.Set("computed_from_required", d.Get("required"))

  return testResourceRead(d, meta)
}

func testResourceRead(d *schema.ResourceData, meta interface{}) error {
  d.Set("computed_read_only", "value_from_api")
  d.Set("computed_read_only_force_new", "value_from_api")
  if _, ok := d.GetOk("optional_computed_map"); !ok {
    d.Set("optional_computed_map", map[string]string{})
  }
  d.Set("computed_map", map[string]string{"key1": "value1"})
  d.Set("computed_list", []string{"listval1", "listval2"})
  d.Set("computed_set", []string{"setval1", "setval2"})

  d.Set("planned_computed", d.Get("optional"))

  // if there is no "set" value, erroneously set it to an empty set. This
  // might change a null value to an empty set, but we should be able to
  // ignore that.
  s := d.Get("set")
  if s == nil || s.(*schema.Set).Len() == 0 {
    d.Set("set", []interface{}{})
  }

  // This mimics many providers always setting a *string value.
  // The existing behavior is that this will appear in the state as an empty
  // string, which we have to maintain.
  o := d.Get("optional")
  if o == "" {
    d.Set("optional", nil)
  }

  // This should not show as set unless it's set in the config
  _, ok := d.GetOkExists("get_ok_exists_false")
  if ok {
    return errors.New("get_ok_exists_false should not be set")
  }

  return nil
}

func testResourceUpdate(d *schema.ResourceData, meta interface{}) error {
  errMsg, _ := d.Get("apply_error").(string)
  if errMsg != "" {
    return errors.New(errMsg)
  }
  return testResourceRead(d, meta)
}

func testResourceDelete(d *schema.ResourceData, meta interface{}) error {
  d.SetId("")
  return nil
}

@ -1,123 +0,0 @@
package test

import (
  "bytes"
  "fmt"
  "math/rand"
  "strings"

  "github.com/hashicorp/terraform/helper/hashcode"
  "github.com/hashicorp/terraform/helper/schema"
)

func testResourceComputedSet() *schema.Resource {
  return &schema.Resource{
    Create: testResourceComputedSetCreate,
    Read: testResourceComputedSetRead,
    Delete: testResourceComputedSetDelete,
    Update: testResourceComputedSetUpdate,

    CustomizeDiff: func(d *schema.ResourceDiff, _ interface{}) error {
      o, n := d.GetChange("set_count")
      if o != n {
        d.SetNewComputed("string_set")
      }
      return nil
    },

    Importer: &schema.ResourceImporter{
      State: schema.ImportStatePassthrough,
    },

    Schema: map[string]*schema.Schema{
      "set_count": {
        Type: schema.TypeInt,
        Optional: true,
      },
      "string_set": {
        Type: schema.TypeSet,
        Computed: true,
        Elem: &schema.Schema{
          Type: schema.TypeString,
        },
        Set: schema.HashString,
      },

      "rule": {
        Type: schema.TypeSet,
        Optional: true,
        Elem: &schema.Resource{
          Schema: map[string]*schema.Schema{
            "id": {
              Type: schema.TypeString,
              Computed: true,
            },

            "ip_protocol": {
              Type: schema.TypeString,
              Required: true,
              ForceNew: false,
            },

            "cidr": {
              Type: schema.TypeString,
              Optional: true,
              ForceNew: false,
              StateFunc: func(v interface{}) string {
                return strings.ToLower(v.(string))
              },
            },
          },
        },
      },
      "optional_set": {
        Type: schema.TypeSet,
        Optional: true,
        Elem: &schema.Schema{Type: schema.TypeString},
        Set: schema.HashString,
      },
    },
  }
}

func computeSecGroupV2RuleHash(v interface{}) int {
  var buf bytes.Buffer
  m := v.(map[string]interface{})
  buf.WriteString(fmt.Sprintf("%s-", m["ip_protocol"].(string)))
  buf.WriteString(fmt.Sprintf("%s-", strings.ToLower(m["cidr"].(string))))

  return hashcode.String(buf.String())
}

func testResourceComputedSetCreate(d *schema.ResourceData, meta interface{}) error {
  d.SetId(fmt.Sprintf("%x", rand.Int63()))
  return testResourceComputedSetRead(d, meta)
}

func testResourceComputedSetRead(d *schema.ResourceData, meta interface{}) error {
  count := 3
  v, ok := d.GetOk("set_count")
  if ok {
    count = v.(int)
  }

  var set []interface{}
  for i := 0; i < count; i++ {
    set = append(set, fmt.Sprintf("%d", i))
  }

  d.Set("string_set", schema.NewSet(schema.HashString, set))

  // This isn't computed, but we should be able to ignore without issues.
  d.Set("optional_set", []interface{}{})
  return nil
}

func testResourceComputedSetUpdate(d *schema.ResourceData, meta interface{}) error {
  return testResourceComputedSetRead(d, meta)
}

func testResourceComputedSetDelete(d *schema.ResourceData, meta interface{}) error {
  d.SetId("")
  return nil
}

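Editor's note on the read function above: string_set is built with schema.NewSet keyed by schema.HashString, so the string_set.# attribute in state equals the number of elements derived from set_count. A minimal, self-contained sketch of that behaviour (illustrative values only):

package main

import (
  "fmt"

  "github.com/hashicorp/terraform/helper/schema"
)

func main() {
  // Same construction as testResourceComputedSetRead above, with the
  // default count of 3 when set_count is unset.
  set := schema.NewSet(schema.HashString, []interface{}{"0", "1", "2"})
  fmt.Println(set.Len()) // 3, which is what string_set.# reports in state
}
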
@ -1,71 +0,0 @@
package test

import (
  "strings"
  "testing"

  "github.com/hashicorp/terraform/helper/resource"
)

func TestResourceComputedSet_update(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: testAccCheckResourceDestroy,
    Steps: []resource.TestStep{
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_computed_set" "foo" {
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr(
            "test_resource_computed_set.foo", "string_set.#", "3",
          ),
        ),
      },
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_computed_set" "foo" {
  set_count = 5
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr(
            "test_resource_computed_set.foo", "string_set.#", "5",
          ),
        ),
      },
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_computed_set" "foo" {
  set_count = 2
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr(
            "test_resource_computed_set.foo", "string_set.#", "2",
          ),
        ),
      },
    },
  })
}

func TestResourceComputedSet_ruleTest(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: testAccCheckResourceDestroy,
    Steps: []resource.TestStep{
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_computed_set" "foo" {
  rule {
    ip_protocol = "udp"
    cidr = "0.0.0.0/0"
  }
}
`),
      },
    },
  })
}

@ -1,78 +0,0 @@
package test

import (
  "fmt"

  "github.com/hashicorp/terraform/helper/schema"
)

func testResourceConfigMode() *schema.Resource {
  return &schema.Resource{
    Create: testResourceConfigModeCreate,
    Read: testResourceConfigModeRead,
    Delete: testResourceConfigModeDelete,
    Update: testResourceConfigModeUpdate,

    Schema: map[string]*schema.Schema{
      "resource_as_attr": {
        Type: schema.TypeList,
        ConfigMode: schema.SchemaConfigModeAttr,
        Optional: true,
        Computed: true,
        Elem: &schema.Resource{
          Schema: map[string]*schema.Schema{
            "foo": {
              Type: schema.TypeString,
              Optional: true,
            },
          },
        },
      },
      "nested_set": {
        Type: schema.TypeSet,
        Optional: true,
        ConfigMode: schema.SchemaConfigModeAttr,
        Elem: &schema.Resource{
          Schema: map[string]*schema.Schema{
            "value": {
              Type: schema.TypeString,
              Optional: true,
            },
            "set": {
              Type: schema.TypeSet,
              Optional: true,
              Elem: &schema.Schema{Type: schema.TypeString},
            },
          },
        },
      },
    },
  }
}

func testResourceConfigModeCreate(d *schema.ResourceData, meta interface{}) error {
  d.SetId("placeholder")
  return testResourceConfigModeRead(d, meta)
}

func testResourceConfigModeRead(d *schema.ResourceData, meta interface{}) error {
  if l, ok := d.Get("resource_as_attr").([]interface{}); !ok {
    return fmt.Errorf("resource_as_attr should appear as []interface{}, not %T", l)
  } else {
    for i, item := range l {
      if _, ok := item.(map[string]interface{}); !ok {
        return fmt.Errorf("resource_as_attr[%d] should appear as map[string]interface{}, not %T", i, item)
      }
    }
  }
  return nil
}

func testResourceConfigModeUpdate(d *schema.ResourceData, meta interface{}) error {
  return testResourceConfigModeRead(d, meta)
}

func testResourceConfigModeDelete(d *schema.ResourceData, meta interface{}) error {
  d.SetId("")
  return nil
}

@ -1,120 +0,0 @@
package test

import (
  "strings"
  "testing"

  "github.com/hashicorp/terraform/helper/resource"
)

func TestResourceConfigMode(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: testAccCheckResourceDestroy,
    Steps: []resource.TestStep{
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
  resource_as_attr = [
    {
      foo = "resource_as_attr 0"
    },
    {
      foo = "resource_as_attr 1"
    },
  ]
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "2"),
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0"),
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.1.foo", "resource_as_attr 1"),
        ),
      },
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
  # Due to a preprocessing fixup we do in lang.EvalBlock, it's allowed
  # to specify resource_as_attr members using one or more nested blocks
  # instead of attribute syntax, if desired. This should be equivalent
  # to the previous config.
  #
  # This allowance is made for backward-compatibility with existing providers
  # before Terraform v0.12 that were expecting nested block types to also
  # support attribute syntax; it should not be used for any new use-cases.
  resource_as_attr {
    foo = "resource_as_attr 0"
  }
  resource_as_attr {
    foo = "resource_as_attr 1"
  }
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "2"),
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0"),
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.1.foo", "resource_as_attr 1"),
        ),
      },
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
  resource_as_attr = [
    {
      foo = "resource_as_attr 0 updated"
    },
  ]
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "1"),
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.0.foo", "resource_as_attr 0 updated"),
        ),
      },
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
  resource_as_attr = []
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#", "0"),
        ),
      },
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
}
`),
        Check: resource.ComposeTestCheckFunc(
          resource.TestCheckNoResourceAttr("test_resource_config_mode.foo", "resource_as_attr.#"),
        ),
      },
    },
  })
}

func TestResourceConfigMode_nestedSet(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: testAccCheckResourceDestroy,
    Steps: []resource.TestStep{
      resource.TestStep{
        Config: strings.TrimSpace(`
resource "test_resource_config_mode" "foo" {
  resource_as_attr = []

  nested_set {
    value = "a"
  }
  nested_set {
    value = "b"
    set = []
  }
}
`),
        Check: resource.ComposeTestCheckFunc(),
      },
    },
  })
}

@ -1,224 +0,0 @@
package test

import (
  "fmt"
  "testing"

  "github.com/hashicorp/terraform/helper/resource"
  "github.com/hashicorp/terraform/terraform"
)

// TestResourceDataDep_alignedCountScaleOut tests to make sure interpolation
// works (namely without index errors) when a data source and a resource share
// the same count variable during scale-out with an existing state.
func TestResourceDataDep_alignedCountScaleOut(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: testResourceDataDepConfig(2),
      },
      {
        Config: testResourceDataDepConfig(4),
        Check: resource.TestCheckOutput("out", "value_from_api,value_from_api,value_from_api,value_from_api"),
      },
    },
  })
}

// TestResourceDataDep_alignedCountScaleIn tests to make sure interpolation
// works (namely without index errors) when a data source and a resource share
// the same count variable during scale-in with an existing state.
func TestResourceDataDep_alignedCountScaleIn(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: testResourceDataDepConfig(4),
      },
      {
        Config: testResourceDataDepConfig(2),
        Check: resource.TestCheckOutput("out", "value_from_api,value_from_api"),
      },
    },
  })
}

// TestDataResourceDep_alignedCountScaleOut functions like
// TestResourceDataDep_alignedCountScaleOut, but with the dependencies swapped
// (resource now depends on data source, a pretty regular use case, but
// included here to check for regressions).
func TestDataResourceDep_alignedCountScaleOut(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: testDataResourceDepConfig(2),
      },
      {
        Config: testDataResourceDepConfig(4),
        Check: resource.TestCheckOutput("out", "test,test,test,test"),
      },
    },
  })
}

// TestDataResourceDep_alignedCountScaleIn functions like
// TestResourceDataDep_alignedCountScaleIn, but with the dependencies swapped
// (resource now depends on data source, a pretty regular use case, but
// included here to check for regressions).
func TestDataResourceDep_alignedCountScaleIn(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: testDataResourceDepConfig(4),
      },
      {
        Config: testDataResourceDepConfig(2),
        Check: resource.TestCheckOutput("out", "test,test"),
      },
    },
  })
}

// TestResourceResourceDep_alignedCountScaleOut functions like
// TestResourceDataDep_alignedCountScaleOut, but with a resource-to-resource
// dependency instead, a pretty regular use case, but included here to check
// for regressions.
func TestResourceResourceDep_alignedCountScaleOut(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: testResourceResourceDepConfig(2),
      },
      {
        Config: testResourceResourceDepConfig(4),
        Check: resource.TestCheckOutput("out", "test,test,test,test"),
      },
    },
  })
}

// TestResourceResourceDep_alignedCountScaleIn functions like
// TestResourceDataDep_alignedCountScaleIn, but with a resource-to-resource
// dependency instead, a pretty regular use case, but included here to check
// for regressions.
func TestResourceResourceDep_alignedCountScaleIn(t *testing.T) {
  resource.UnitTest(t, resource.TestCase{
    Providers: testAccProviders,
    CheckDestroy: func(s *terraform.State) error {
      return nil
    },
    Steps: []resource.TestStep{
      {
        Config: testResourceResourceDepConfig(4),
      },
      {
        Config: testResourceResourceDepConfig(2),
        Check: resource.TestCheckOutput("out", "test,test"),
      },
    },
  })
}

func testResourceDataDepConfig(count int) string {
  return fmt.Sprintf(`
variable num {
  default = "%d"
}

resource "test_resource" "foo" {
  count = "${var.num}"
  required = "yes"

  required_map = {
    "foo" = "bar"
  }
}

data "test_data_source" "bar" {
  count = "${var.num}"
  input = "${test_resource.foo.*.computed_read_only[count.index]}"
}

output "out" {
  value = "${join(",", data.test_data_source.bar.*.output)}"
}
`, count)
}

func testDataResourceDepConfig(count int) string {
  return fmt.Sprintf(`
variable num {
  default = "%d"
}

data "test_data_source" "foo" {
  count = "${var.num}"
  input = "test"
}

resource "test_resource" "bar" {
  count = "${var.num}"
  required = "yes"
  optional = "${data.test_data_source.foo.*.output[count.index]}"

  required_map = {
    "foo" = "bar"
  }
}

output "out" {
  value = "${join(",", test_resource.bar.*.optional)}"
}
`, count)
}

func testResourceResourceDepConfig(count int) string {
  return fmt.Sprintf(`
variable num {
  default = "%d"
}

resource "test_resource" "foo" {
  count = "${var.num}"
  required = "yes"
  optional = "test"

  required_map = {
    "foo" = "bar"
  }
}

resource "test_resource" "bar" {
  count = "${var.num}"
  required = "yes"
  optional = "${test_resource.foo.*.optional[count.index]}"

  required_map = {
    "foo" = "bar"
  }
}

output "out" {
  value = "${join(",", test_resource.bar.*.optional)}"
}
`, count)
}

@ -1,491 +0,0 @@
package test

import (
  "reflect"
  "testing"

  "github.com/google/go-cmp/cmp"

  "github.com/hashicorp/terraform/helper/schema"
  "github.com/hashicorp/terraform/terraform"
)

var dataprocClusterSchema = map[string]*schema.Schema{
  "name": {
    Type: schema.TypeString,
    Required: true,
    ForceNew: true,
  },

  "project": {
    Type: schema.TypeString,
    Optional: true,
    Computed: true,
    ForceNew: true,
  },

  "region": {
    Type: schema.TypeString,
    Optional: true,
    Default: "global",
    ForceNew: true,
  },

  "labels": {
    Type: schema.TypeMap,
    Optional: true,
    Elem: &schema.Schema{Type: schema.TypeString},
    // GCP automatically adds two labels
    //    'goog-dataproc-cluster-uuid'
    //    'goog-dataproc-cluster-name'
    Computed: true,
    DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
      if old != "" {
        return true
      }
      return false
    },
  },

  "tag_set": {
    Type: schema.TypeSet,
    Optional: true,
    Elem: &schema.Schema{Type: schema.TypeString},
    Set: schema.HashString,
  },

  "cluster_config": {
    Type: schema.TypeList,
    Optional: true,
    Computed: true,
    MaxItems: 1,
    Elem: &schema.Resource{
      Schema: map[string]*schema.Schema{

        "delete_autogen_bucket": {
          Type: schema.TypeBool,
          Optional: true,
          Default: false,
          Removed: "If you need a bucket that can be deleted, please create" +
            "a new one and set the `staging_bucket` field",
        },

        "staging_bucket": {
          Type: schema.TypeString,
          Optional: true,
          ForceNew: true,
        },
        "bucket": {
          Type: schema.TypeString,
          Computed: true,
        },

        "gce_cluster_config": {
          Type: schema.TypeList,
          Optional: true,
          Computed: true,
          MaxItems: 1,
          Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{

              "zone": {
                Type: schema.TypeString,
                Optional: true,
                Computed: true,
                ForceNew: true,
              },

              "network": {
                Type: schema.TypeString,
                Optional: true,
                Computed: true,
                ForceNew: true,
                ConflictsWith: []string{"cluster_config.0.gce_cluster_config.0.subnetwork"},
              },

              "subnetwork": {
                Type: schema.TypeString,
                Optional: true,
                ForceNew: true,
                ConflictsWith: []string{"cluster_config.0.gce_cluster_config.0.network"},
              },

              "tags": {
                Type: schema.TypeSet,
                Optional: true,
                ForceNew: true,
                Elem: &schema.Schema{Type: schema.TypeString},
              },

              "service_account": {
                Type: schema.TypeString,
                Optional: true,
                ForceNew: true,
              },

              "service_account_scopes": {
                Type: schema.TypeSet,
                Optional: true,
                Computed: true,
                ForceNew: true,
                Elem: &schema.Schema{
                  Type: schema.TypeString,
                },
              },

              "internal_ip_only": {
                Type: schema.TypeBool,
                Optional: true,
                ForceNew: true,
                Default: false,
              },

              "metadata": {
                Type: schema.TypeMap,
                Optional: true,
                Elem: &schema.Schema{Type: schema.TypeString},
                ForceNew: true,
              },
            },
          },
        },

        "master_config": &schema.Schema{
          Type: schema.TypeList,
          Optional: true,
          Computed: true,
          MaxItems: 1,
          Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
              "num_instances": {
                Type: schema.TypeInt,
                Optional: true,
                Computed: true,
              },

              "image_uri": {
                Type: schema.TypeString,
                Optional: true,
                Computed: true,
                ForceNew: true,
              },

              "machine_type": {
                Type: schema.TypeString,
                Optional: true,
                Computed: true,
                ForceNew: true,
              },

              "disk_config": {
                Type: schema.TypeList,
                Optional: true,
                Computed: true,
                MaxItems: 1,

                Elem: &schema.Resource{
                  Schema: map[string]*schema.Schema{
                    "num_local_ssds": {
                      Type: schema.TypeInt,
                      Optional: true,
                      Computed: true,
                      ForceNew: true,
                    },

                    "boot_disk_size_gb": {
                      Type: schema.TypeInt,
                      Optional: true,
                      Computed: true,
                      ForceNew: true,
                    },

                    "boot_disk_type": {
                      Type: schema.TypeString,
                      Optional: true,
                      ForceNew: true,
                      Default: "pd-standard",
                    },
                  },
                },
              },
              "accelerators": {
                Type: schema.TypeSet,
                Optional: true,
                ForceNew: true,
                Elem: &schema.Resource{
                  Schema: map[string]*schema.Schema{
                    "accelerator_type": {
                      Type: schema.TypeString,
                      Required: true,
                      ForceNew: true,
                    },

                    "accelerator_count": {
                      Type: schema.TypeInt,
                      Required: true,
                      ForceNew: true,
                    },
                  },
                },
              },
              "instance_names": {
                Type: schema.TypeList,
                Computed: true,
                Elem: &schema.Schema{Type: schema.TypeString},
              },
            },
          },
        },
        "preemptible_worker_config": {
          Type: schema.TypeList,
          Optional: true,
          Computed: true,
          MaxItems: 1,
          Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
              "num_instances": {
                Type: schema.TypeInt,
                Optional: true,
                Computed: true,
              },
              "disk_config": {
                Type: schema.TypeList,
                Optional: true,
                Computed: true,
                MaxItems: 1,

                Elem: &schema.Resource{
                  Schema: map[string]*schema.Schema{
                    "num_local_ssds": {
                      Type: schema.TypeInt,
                      Optional: true,
                      Computed: true,
                      ForceNew: true,
                    },

                    "boot_disk_size_gb": {
                      Type: schema.TypeInt,
                      Optional: true,
                      Computed: true,
                      ForceNew: true,
                    },

                    "boot_disk_type": {
                      Type: schema.TypeString,
                      Optional: true,
                      ForceNew: true,
                      Default: "pd-standard",
                    },
                  },
                },
              },

              "instance_names": {
                Type: schema.TypeList,
                Computed: true,
                Elem: &schema.Schema{Type: schema.TypeString},
              },
            },
          },
        },

        "software_config": {
          Type: schema.TypeList,
          Optional: true,
          Computed: true,
          MaxItems: 1,

          Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
              "image_version": {
                Type: schema.TypeString,
                Optional: true,
                Computed: true,
                ForceNew: true,
              },

              "override_properties": {
                Type: schema.TypeMap,
                Optional: true,
                ForceNew: true,
                Elem: &schema.Schema{Type: schema.TypeString},
              },

              "properties": {
                Type: schema.TypeMap,
                Computed: true,
              },
            },
          },
        },

        "initialization_action": {
          Type: schema.TypeList,
          Optional: true,
          ForceNew: true,
          Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
              "script": {
                Type: schema.TypeString,
                Required: true,
                ForceNew: true,
              },

              "timeout_sec": {
                Type: schema.TypeInt,
                Optional: true,
                Default: 300,
                ForceNew: true,
              },
            },
          },
        },
        "encryption_config": {
          Type: schema.TypeList,
          Optional: true,
          MaxItems: 1,
          Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
              "kms_key_name": {
                Type: schema.TypeString,
                Required: true,
              },
            },
          },
        },
      },
    },
  },
}

func TestDiffApply_dataprocCluster(t *testing.T) {
  priorAttrs := map[string]string{
    "cluster_config.#": "1",
    "cluster_config.0.bucket": "dataproc-1dc18cb2-116e-4e92-85ea-ff63a1bf2745-us-central1",
    "cluster_config.0.delete_autogen_bucket": "false",
    "cluster_config.0.encryption_config.#": "0",
    "cluster_config.0.gce_cluster_config.#": "1",
    "cluster_config.0.gce_cluster_config.0.internal_ip_only": "false",
    "cluster_config.0.gce_cluster_config.0.metadata.%": "0",
    "cluster_config.0.gce_cluster_config.0.network": "https://www.googleapis.com/compute/v1/projects/hc-terraform-testing/global/networks/default",
    "cluster_config.0.gce_cluster_config.0.service_account": "",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.#": "7",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.1245378569": "https://www.googleapis.com/auth/bigtable.admin.table",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.1328717722": "https://www.googleapis.com/auth/devstorage.read_write",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.1693978638": "https://www.googleapis.com/auth/devstorage.full_control",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.172152165": "https://www.googleapis.com/auth/logging.write",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.2401844655": "https://www.googleapis.com/auth/bigquery",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.299921284": "https://www.googleapis.com/auth/bigtable.data",
    "cluster_config.0.gce_cluster_config.0.service_account_scopes.3804780973": "https://www.googleapis.com/auth/cloud.useraccounts.readonly",
    "cluster_config.0.gce_cluster_config.0.subnetwork": "",
    "cluster_config.0.gce_cluster_config.0.tags.#": "0",
    "cluster_config.0.gce_cluster_config.0.zone": "us-central1-f",
    "cluster_config.0.initialization_action.#": "0",
    "cluster_config.0.master_config.#": "1",
    "cluster_config.0.master_config.0.accelerators.#": "0",
    "cluster_config.0.master_config.0.disk_config.#": "1",
    "cluster_config.0.master_config.0.disk_config.0.boot_disk_size_gb": "500",
    "cluster_config.0.master_config.0.disk_config.0.boot_disk_type": "pd-standard",
    "cluster_config.0.master_config.0.disk_config.0.num_local_ssds": "0",
    "cluster_config.0.master_config.0.image_uri": "https://www.googleapis.com/compute/v1/projects/cloud-dataproc/global/images/dataproc-1-3-deb9-20190228-000000-rc01",
    "cluster_config.0.master_config.0.instance_names.#": "1",
    "cluster_config.0.master_config.0.instance_names.0": "dproc-cluster-test-2ww3c60iww-m",
    "cluster_config.0.master_config.0.machine_type": "n1-standard-4",
    "cluster_config.0.master_config.0.num_instances": "1",
    "cluster_config.0.preemptible_worker_config.#": "1",
    "cluster_config.0.preemptible_worker_config.0.disk_config.#": "1",
    "cluster_config.0.preemptible_worker_config.0.instance_names.#": "0",
    "cluster_config.0.preemptible_worker_config.0.num_instances": "0",
    "cluster_config.0.software_config.#": "1",
    "cluster_config.0.software_config.0.image_version": "1.3.28-deb9",
    "cluster_config.0.software_config.0.override_properties.%": "0",
    "cluster_config.0.software_config.0.properties.%": "14",
    "cluster_config.0.software_config.0.properties.capacity-scheduler:yarn.scheduler.capacity.root.default.ordering-policy": "fair",
    "cluster_config.0.software_config.0.properties.core:fs.gs.block.size": "134217728",
    "cluster_config.0.software_config.0.properties.core:fs.gs.metadata.cache.enable": "false",
    "cluster_config.0.software_config.0.properties.core:hadoop.ssl.enabled.protocols": "TLSv1,TLSv1.1,TLSv1.2",
    "cluster_config.0.software_config.0.properties.distcp:mapreduce.map.java.opts": "-Xmx768m",
    "cluster_config.0.software_config.0.properties.distcp:mapreduce.map.memory.mb": "1024",
    "cluster_config.0.software_config.0.properties.distcp:mapreduce.reduce.java.opts": "-Xmx768m",
    "cluster_config.0.software_config.0.properties.distcp:mapreduce.reduce.memory.mb": "1024",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.datanode.address": "0.0.0.0:9866",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.datanode.http.address": "0.0.0.0:9864",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.datanode.https.address": "0.0.0.0:9865",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.datanode.ipc.address": "0.0.0.0:9867",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.handler.count": "20",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.http-address": "0.0.0.0:9870",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.https-address": "0.0.0.0:9871",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.lifeline.rpc-address": "dproc-cluster-test-2ww3c60iww-m:8050",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.secondary.http-address": "0.0.0.0:9868",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.secondary.https-address": "0.0.0.0:9869",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.service.handler.count": "10",
    "cluster_config.0.software_config.0.properties.hdfs:dfs.namenode.servicerpc-address": "dproc-cluster-test-2ww3c60iww-m:8051",
    "cluster_config.0.software_config.0.properties.mapred-env:HADOOP_JOB_HISTORYSERVER_HEAPSIZE": "3840",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.job.maps": "21",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.job.reduce.slowstart.completedmaps": "0.95",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.job.reduces": "7",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.map.cpu.vcores": "1",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.map.java.opts": "-Xmx2457m",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.map.memory.mb": "3072",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.reduce.cpu.vcores": "1",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.reduce.java.opts": "-Xmx2457m",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.reduce.memory.mb": "3072",
    "cluster_config.0.software_config.0.properties.mapred:mapreduce.task.io.sort.mb": "256",
    "cluster_config.0.software_config.0.properties.mapred:yarn.app.mapreduce.am.command-opts": "-Xmx2457m",
    "cluster_config.0.software_config.0.properties.mapred:yarn.app.mapreduce.am.resource.cpu-vcores": "1",
    "cluster_config.0.software_config.0.properties.mapred:yarn.app.mapreduce.am.resource.mb": "3072",
    "cluster_config.0.software_config.0.properties.presto-jvm:MaxHeapSize": "12288m",
    "cluster_config.0.software_config.0.properties.presto:query.max-memory-per-node": "7372MB",
    "cluster_config.0.software_config.0.properties.presto:query.max-total-memory-per-node": "7372MB",
    "cluster_config.0.software_config.0.properties.spark-env:SPARK_DAEMON_MEMORY": "3840m",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.driver.maxResultSize": "1920m",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.driver.memory": "3840m",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.executor.cores": "2",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.executor.instances": "2",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.executor.memory": "5586m",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.executorEnv.OPENBLAS_NUM_THREADS": "1",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.scheduler.mode": "FAIR",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.sql.cbo.enabled": "true",
|
||||
"cluster_config.0.software_config.0.properties.spark:spark.yarn.am.memory": "640m",
|
||||
"cluster_config.0.software_config.0.properties.yarn-env:YARN_TIMELINESERVER_HEAPSIZE": "3840",
|
||||
"cluster_config.0.software_config.0.properties.yarn:yarn.nodemanager.resource.memory-mb": "12288",
|
||||
"cluster_config.0.software_config.0.properties.yarn:yarn.resourcemanager.nodemanager-graceful-decommission-timeout-secs": "86400",
|
||||
"cluster_config.0.software_config.0.properties.yarn:yarn.scheduler.maximum-allocation-mb": "12288",
|
||||
"cluster_config.0.software_config.0.properties.yarn:yarn.scheduler.minimum-allocation-mb": "1024",
|
||||
"cluster_config.0.staging_bucket": "",
|
||||
"id": "dproc-cluster-test-ktbyrniu4e",
|
||||
"labels.%": "4",
|
||||
"labels.goog-dataproc-cluster-name": "dproc-cluster-test-ktbyrniu4e",
|
||||
"labels.goog-dataproc-cluster-uuid": "d576c4e0-8fda-4ad1-abf5-ec951ab25855",
|
||||
"labels.goog-dataproc-location": "us-central1",
|
||||
"labels.key1": "value1",
|
||||
"tag_set.#": "0",
|
||||
}
|
||||
|
||||
diff := &terraform.InstanceDiff{
|
||||
Attributes: map[string]*terraform.ResourceAttrDiff{
|
||||
"labels.%": &terraform.ResourceAttrDiff{Old: "4", New: "1", NewComputed: false, NewRemoved: false, NewExtra: interface{}(nil), RequiresNew: false, Sensitive: false, Type: 0x0},
|
||||
"labels.goog-dataproc-cluster-name": &terraform.ResourceAttrDiff{Old: "dproc-cluster-test-ktbyrniu4e", New: "", NewComputed: false, NewRemoved: true, NewExtra: interface{}(nil), RequiresNew: false, Sensitive: false, Type: 0x0},
|
||||
"labels.goog-dataproc-cluster-uuid": &terraform.ResourceAttrDiff{Old: "d576c4e0-8fda-4ad1-abf5-ec951ab25855", New: "", NewComputed: false, NewRemoved: true, NewExtra: interface{}(nil), RequiresNew: false, Sensitive: false, Type: 0x0},
|
||||
"labels.goog-dataproc-location": &terraform.ResourceAttrDiff{Old: "us-central1", New: "", NewComputed: false, NewRemoved: true, NewExtra: interface{}(nil), RequiresNew: false, Sensitive: false, Type: 0x0},
|
||||
},
|
||||
}
|
||||
|
||||
newAttrs, err := diff.Apply(priorAttrs, (&schema.Resource{Schema: dataprocClusterSchema}).CoreConfigSchema())
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// the diffed label elements should be removed
|
||||
delete(priorAttrs, "labels.goog-dataproc-cluster-name")
|
||||
delete(priorAttrs, "labels.goog-dataproc-cluster-uuid")
|
||||
delete(priorAttrs, "labels.goog-dataproc-location")
|
||||
priorAttrs["labels.%"] = "1"
|
||||
|
||||
// the missing required "name" should be added
|
||||
priorAttrs["name"] = ""
|
||||
|
||||
if !reflect.DeepEqual(priorAttrs, newAttrs) {
|
||||
t.Fatal(cmp.Diff(priorAttrs, newAttrs))
|
||||
}
|
||||
}
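// For reference, a minimal sketch of the flatmap diff-application pattern the
// test above exercises (the attribute names in this sketch are illustrative,
// not taken from the real schema):
//
//	diff := &terraform.InstanceDiff{
//		Attributes: map[string]*terraform.ResourceAttrDiff{
//			"labels.example": {Old: "old-value", NewRemoved: true},
//			"labels.%":       {Old: "4", New: "3"},
//		},
//	}
//	newAttrs, err := diff.Apply(priorAttrs, (&schema.Resource{Schema: dataprocClusterSchema}).CoreConfigSchema())
//
// As the expected map above encodes, attributes marked NewRemoved are dropped
// from the result, the "labels.%" count is updated, and the missing required
// "name" attribute comes back as an empty string.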
|
|
@ -1,70 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceDefaults() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceDefaultsCreate,
|
||||
Read: testResourceDefaultsRead,
|
||||
Delete: testResourceDefaultsDelete,
|
||||
Update: testResourceDefaultsUpdate,
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"default_string": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: "default string",
|
||||
},
|
||||
"default_bool": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: true,
|
||||
},
|
||||
"nested": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"string": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: "default nested",
|
||||
},
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
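// Note: default_bool is declared as TypeString with a bool Default (true). As
// the acceptance tests for this resource show, that default ends up in state
// as the string "1", while an explicitly configured `false` is stored as
// "false", which is why the checks below compare against "1" and "false".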
|
||||
|
||||
func testResourceDefaultsCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId(fmt.Sprintf("%x", rand.Int63()))
|
||||
return testResourceDefaultsRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceDefaultsUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
return testResourceDefaultsRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceDefaultsRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDefaultsDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,168 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestResourceDefaults_basic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_defaults" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_string", "default string",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_bool", "1",
|
||||
),
|
||||
resource.TestCheckNoResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.#",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceDefaults_change(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_defaults" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_string", "default string",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_bool", "1",
|
||||
),
|
||||
resource.TestCheckNoResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.#",
|
||||
),
|
||||
),
|
||||
},
|
||||
{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_defaults" "foo" {
|
||||
default_string = "new"
|
||||
default_bool = false
|
||||
nested {
|
||||
optional = "nested"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_string", "new",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_bool", "false",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.2950978312.optional", "nested",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.2950978312.string", "default nested",
|
||||
),
|
||||
),
|
||||
},
|
||||
{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_defaults" "foo" {
|
||||
default_string = "new"
|
||||
default_bool = false
|
||||
nested {
|
||||
optional = "nested"
|
||||
string = "new"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_string", "new",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_bool", "false",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.782850362.optional", "nested",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.782850362.string", "new",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceDefaults_inSet(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_defaults" "foo" {
|
||||
nested {
|
||||
optional = "val"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_string", "default string",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "default_bool", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.2826070548.optional", "val",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_defaults.foo", "nested.2826070548.string", "default nested",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestDefaults_emptyString(t *testing.T) {
|
||||
config := `
|
||||
resource "test_resource_defaults" "test" {
|
||||
default_string = ""
|
||||
}
|
||||
`
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: config,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_defaults.test", "default_string", ""),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceDeprecated() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceDeprecatedCreate,
|
||||
Read: testResourceDeprecatedRead,
|
||||
Update: testResourceDeprecatedUpdate,
|
||||
Delete: testResourceDeprecatedDelete,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"map_deprecated": {
|
||||
Type: schema.TypeMap,
|
||||
Optional: true,
|
||||
Deprecated: "deprecated",
|
||||
},
|
||||
"map_removed": {
|
||||
Type: schema.TypeMap,
|
||||
Optional: true,
|
||||
Removed: "removed",
|
||||
},
|
||||
"set_block_deprecated": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
MaxItems: 1,
|
||||
Deprecated: "deprecated",
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"value": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
Deprecated: "deprecated",
|
||||
},
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
Deprecated: "deprecated",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"set_block_removed": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
MaxItems: 1,
|
||||
Removed: "Removed",
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
Removed: "removed",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"list_block_deprecated": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Deprecated: "deprecated",
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"value": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
Deprecated: "deprecated",
|
||||
},
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
Deprecated: "deprecated",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"list_block_removed": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Removed: "removed",
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
Removed: "removed",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
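// As exercised by the tests for this resource: fields and blocks marked
// Deprecated can still be set and applied successfully, while declaring the
// removed blocks is expected to produce an error; the removed-block test
// matches that error with the regexp "REMOVED".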
|
||||
|
||||
func testResourceDeprecatedCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("testId")
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDeprecatedRead(d *schema.ResourceData, meta interface{}) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDeprecatedUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDeprecatedDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,71 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
// an empty config should be ok, because no deprecated/removed fields are set.
|
||||
func TestResourceDeprecated_empty(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_deprecated" "foo" {
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Deprecated fields should still work
|
||||
func TestResourceDeprecated_deprecatedOK(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_deprecated" "foo" {
|
||||
map_deprecated = {
|
||||
"a" = "b",
|
||||
}
|
||||
set_block_deprecated {
|
||||
value = "1"
|
||||
}
|
||||
list_block_deprecated {
|
||||
value = "2"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Declaring an empty block should trigger the error
|
||||
func TestResourceDeprecated_removedBlocks(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_deprecated" "foo" {
|
||||
set_block_removed {
|
||||
}
|
||||
list_block_removed {
|
||||
}
|
||||
}
|
||||
`),
|
||||
ExpectError: regexp.MustCompile("REMOVED"),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,104 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceDiffSuppress() *schema.Resource {
|
||||
diffSuppress := func(k, old, new string, d *schema.ResourceData) bool {
|
||||
if old == "" || strings.Contains(new, "replace") {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
return &schema.Resource{
|
||||
Create: testResourceDiffSuppressCreate,
|
||||
Read: testResourceDiffSuppressRead,
|
||||
Delete: testResourceDiffSuppressDelete,
|
||||
Update: testResourceDiffSuppressUpdate,
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"val_to_upper": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
StateFunc: func(val interface{}) string {
|
||||
return strings.ToUpper(val.(string))
|
||||
},
|
||||
DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
|
||||
return strings.ToUpper(old) == strings.ToUpper(new)
|
||||
},
|
||||
},
|
||||
"network": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: "default",
|
||||
ForceNew: true,
|
||||
DiffSuppressFunc: diffSuppress,
|
||||
},
|
||||
"subnetwork": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
DiffSuppressFunc: diffSuppress,
|
||||
},
|
||||
|
||||
"node_pool": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
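// The shared diffSuppress helper above reports the diff as suppressed unless
// the prior value is empty or the new value contains "replace"; val_to_upper
// additionally normalizes its value to upper case via StateFunc and suppresses
// differences that vary only in letter case.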
|
||||
|
||||
func testResourceDiffSuppressCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.Set("network", "modified")
|
||||
d.Set("subnetwork", "modified")
|
||||
|
||||
if _, ok := d.GetOk("node_pool"); !ok {
|
||||
d.Set("node_pool", []string{})
|
||||
}
|
||||
|
||||
id := fmt.Sprintf("%x", rand.Int63())
|
||||
d.SetId(id)
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDiffSuppressRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDiffSuppressUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDiffSuppressDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,126 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/addrs"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestResourceDiffSuppress_create(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_diff_suppress" "foo" {
|
||||
val_to_upper = "foo"
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
func TestResourceDiffSuppress_update(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_diff_suppress" "foo" {
|
||||
val_to_upper = "foo"
|
||||
}
|
||||
`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_diff_suppress" "foo" {
|
||||
val_to_upper = "bar"
|
||||
optional = "more"
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceDiffSuppress_updateIgnoreChanges(t *testing.T) {
|
||||
// None of these steps should replace the instance
|
||||
id := ""
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_diff_suppress.foo"]
|
||||
if id != "" && res.Primary.ID != id {
|
||||
return errors.New("expected no resource replacement")
|
||||
}
|
||||
id = res.Primary.ID
|
||||
return nil
|
||||
}
|
||||
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_diff_suppress" "foo" {
|
||||
val_to_upper = "foo"
|
||||
|
||||
network = "foo"
|
||||
subnetwork = "foo"
|
||||
|
||||
node_pool {
|
||||
name = "default-pool"
|
||||
}
|
||||
lifecycle {
|
||||
ignore_changes = ["node_pool"]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_diff_suppress" "foo" {
|
||||
val_to_upper = "foo"
|
||||
|
||||
network = "ignored"
|
||||
subnetwork = "ignored"
|
||||
|
||||
node_pool {
|
||||
name = "default-pool"
|
||||
}
|
||||
lifecycle {
|
||||
ignore_changes = ["node_pool"]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_diff_suppress" "foo" {
|
||||
val_to_upper = "foo"
|
||||
|
||||
network = "ignored"
|
||||
subnetwork = "ignored"
|
||||
|
||||
node_pool {
|
||||
name = "ignored"
|
||||
}
|
||||
lifecycle {
|
||||
ignore_changes = ["node_pool"]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,39 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceForceNew() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceForceNewCreate,
|
||||
Read: testResourceForceNewRead,
|
||||
Delete: testResourceForceNewDelete,
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"triggers": {
|
||||
Type: schema.TypeMap,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
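// Because "triggers" is the only attribute and it is ForceNew (and the
// resource defines no Update function), any change to the map, including
// adding or removing keys as in the tests below, replaces the instance rather
// than updating it in place.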
|
||||
|
||||
func testResourceForceNewCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("testId")
|
||||
return testResourceForceNewRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceForceNewRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceForceNewDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,79 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestResourceForceNew_create(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_force_new" "foo" {
|
||||
triggers = {
|
||||
"a" = "foo"
|
||||
}
|
||||
}`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
func TestResourceForceNew_update(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_force_new" "foo" {
|
||||
triggers = {
|
||||
"a" = "foo"
|
||||
}
|
||||
}`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_force_new" "foo" {
|
||||
triggers = {
|
||||
"a" = "bar"
|
||||
}
|
||||
}`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_force_new" "foo" {
|
||||
triggers = {
|
||||
"b" = "bar"
|
||||
}
|
||||
}`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceForceNew_remove(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_force_new" "foo" {
|
||||
triggers = {
|
||||
"a" = "bar"
|
||||
}
|
||||
}`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_force_new" "foo" {
|
||||
} `),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,64 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
// This is a test resource to help reproduce GH-12183. The issue arose from a
|
||||
// complex mixing of core and helper/schema; while we added core tests to
|
||||
// cover some of the cases, this resource tops them off with an end-to-end
|
||||
// test.
|
||||
func testResourceGH12183() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceCreate_gh12183,
|
||||
Read: testResourceRead_gh12183,
|
||||
Update: testResourceUpdate_gh12183,
|
||||
Delete: testResourceDelete_gh12183,
|
||||
Schema: map[string]*schema.Schema{
|
||||
"key": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
|
||||
"config": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
MinItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
|
||||
"rules": {
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Set: schema.HashString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func testResourceCreate_gh12183(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("testId")
|
||||
return testResourceRead_gh12183(d, meta)
|
||||
}
|
||||
|
||||
func testResourceRead_gh12183(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceUpdate_gh12183(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceDelete_gh12183(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,40 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
// Tests GH-12183. This would previously cause a crash. More granular
|
||||
// unit tests are scattered through helper/schema and terraform core for
|
||||
// this.
|
||||
func TestResourceGH12183_basic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_gh12183" "a" {
|
||||
config {
|
||||
name = "hello"
|
||||
}
|
||||
}
|
||||
|
||||
resource "test_resource_gh12183" "b" {
|
||||
key = "${lookup(test_resource_gh12183.a.config[0], "name")}"
|
||||
config {
|
||||
name = "required"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: func(s *terraform.State) error {
|
||||
return nil
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,192 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceList() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceListCreate,
|
||||
Read: testResourceListRead,
|
||||
Update: testResourceListUpdate,
|
||||
Delete: testResourceListDelete,
|
||||
|
||||
CustomizeDiff: func(d *schema.ResourceDiff, _ interface{}) error {
|
||||
if d.HasChange("dependent_list") {
|
||||
d.SetNewComputed("computed_list")
|
||||
}
|
||||
return nil
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"list_block": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"string": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"int": {
|
||||
Type: schema.TypeInt,
|
||||
Optional: true,
|
||||
},
|
||||
"force_new": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
"sublist": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
},
|
||||
},
|
||||
"sublist_block": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"string": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"int": {
|
||||
Type: schema.TypeInt,
|
||||
Required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"sublist_block_optional": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"list": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"dependent_list": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"val": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"computed_list": {
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"min_items": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
MinItems: 2,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"val": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"never_set": {
|
||||
Type: schema.TypeList,
|
||||
MaxItems: 1,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"sublist": {
|
||||
Type: schema.TypeList,
|
||||
MaxItems: 1,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"bool": {
|
||||
Type: schema.TypeBool,
|
||||
ForceNew: true,
|
||||
Required: true,
|
||||
},
|
||||
"string": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"map_list": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeMap},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
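// The CustomizeDiff above marks computed_list as newly computed whenever
// dependent_list changes, so its elements are unknown during plan; the
// interpolation tests for this resource rely on that to exercise unknown
// values flowing into other resources' list blocks.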
|
||||
|
||||
func testResourceListCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("testId")
|
||||
return testResourceListRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceListRead(d *schema.ResourceData, meta interface{}) error {
|
||||
fixedIps := d.Get("dependent_list")
|
||||
|
||||
// computed_list should be set as computed with a CustomizeDiff func, but
|
||||
// we're trying to emulate legacy provider behavior, and updating a
|
||||
// computed field was a common case.
|
||||
ips := []interface{}{}
|
||||
if fixedIps != nil {
|
||||
for _, v := range fixedIps.([]interface{}) {
|
||||
m := v.(map[string]interface{})
|
||||
ips = append(ips, m["val"])
|
||||
}
|
||||
}
|
||||
if err := d.Set("computed_list", ips); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// "computing" these values should insert empty containers into the
|
||||
// never_set block.
|
||||
values := make(map[string]interface{})
|
||||
values["sublist"] = []interface{}{}
|
||||
d.Set("never_set", []interface{}{values})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceListUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
block := d.Get("never_set").([]interface{})
|
||||
if len(block) > 0 {
|
||||
// if the never_set block contains any values, they should not be nil
|
||||
_ = block[0].(map[string]interface{})
|
||||
}
|
||||
return testResourceListRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceListDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,192 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceListSet() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceListSetCreate,
|
||||
Read: testResourceListSetRead,
|
||||
Delete: testResourceListSetDelete,
|
||||
Update: testResourceListSetUpdate,
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"list": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"set": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"elem": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
DiffSuppressFunc: func(_, o, n string, _ *schema.ResourceData) bool {
|
||||
return o == n
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Set: func(v interface{}) int {
|
||||
raw := v.(map[string]interface{})
|
||||
if el, ok := raw["elem"]; ok {
|
||||
return schema.HashString(el)
|
||||
}
|
||||
return 42
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"replication_configuration": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"role": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"rules": {
|
||||
Type: schema.TypeSet,
|
||||
Required: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"id": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"destination": {
|
||||
Type: schema.TypeSet,
|
||||
MaxItems: 1,
|
||||
MinItems: 1,
|
||||
Required: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"account_id": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"bucket": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"storage_class": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"replica_kms_key_id": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"access_control_translation": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
MinItems: 1,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"owner": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"source_selection_criteria": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
MinItems: 1,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"sse_kms_encrypted_objects": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
MinItems: 1,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"enabled": {
|
||||
Type: schema.TypeBool,
|
||||
Required: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"prefix": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"status": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"priority": {
|
||||
Type: schema.TypeInt,
|
||||
Optional: true,
|
||||
},
|
||||
"filter": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
MinItems: 1,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"prefix": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"tags": {
|
||||
Type: schema.TypeMap,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
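// The custom Set function above hashes each set element by its "elem" string
// (falling back to 42 when the key is missing), which is where the stable
// numeric indexes such as list.0.set.1255198513.elem asserted in the tests
// below come from.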
|
||||
|
||||
func testResourceListSetCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId(fmt.Sprintf("%x", rand.Int63()))
|
||||
return testResourceListSetRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceListSetUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
return testResourceListSetRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceListSetRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceListSetDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,118 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestResourceListSet_basic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list_set" "foo" {
|
||||
list {
|
||||
set {
|
||||
elem = "A"
|
||||
}
|
||||
set {
|
||||
elem = "B"
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "list.0.set.1255198513.elem", "B"),
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "list.0.set.3554254475.elem", "A"),
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "list.0.set.#", "2"),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list_set" "foo" {
|
||||
list {
|
||||
set {
|
||||
elem = "B"
|
||||
}
|
||||
set {
|
||||
elem = "C"
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "list.0.set.1255198513.elem", "B"),
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "list.0.set.1037565863.elem", "C"),
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "list.0.set.#", "2"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceListSet_updateNested(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list_set" "foo" {
|
||||
replication_configuration {
|
||||
role = "role_id"
|
||||
rules {
|
||||
id = "foobar"
|
||||
status = "Enabled"
|
||||
priority = 42
|
||||
filter {
|
||||
tags = {
|
||||
ReplicateMe = "Yes"
|
||||
}
|
||||
}
|
||||
destination {
|
||||
bucket = "bucket_id"
|
||||
storage_class = "STANDARD"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "replication_configuration.0.rules.#", "1"),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list_set" "foo" {
|
||||
replication_configuration {
|
||||
role = "role_id"
|
||||
rules {
|
||||
id = "foobar"
|
||||
status = "Enabled"
|
||||
priority = 42
|
||||
filter {
|
||||
prefix = "foo"
|
||||
tags = {
|
||||
ReplicateMe = "Yes"
|
||||
AnotherTag = "OK"
|
||||
}
|
||||
}
|
||||
destination {
|
||||
bucket = "bucket_id"
|
||||
storage_class = "STANDARD"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list_set.foo", "replication_configuration.0.rules.#", "1"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,566 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
// changing the contents of a list block should be reflected in state on each apply.
|
||||
func TestResourceList_changed(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
string = "a"
|
||||
int = 1
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.string", "a",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.int", "1",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
string = "a"
|
||||
int = 1
|
||||
}
|
||||
|
||||
list_block {
|
||||
string = "b"
|
||||
int = 2
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.#", "2",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.string", "a",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.int", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.1.string", "b",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.1.int", "2",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
string = "a"
|
||||
int = 1
|
||||
}
|
||||
|
||||
list_block {
|
||||
string = "c"
|
||||
int = 2
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.#", "2",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.string", "a",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.int", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.1.string", "c",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.1.int", "2",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceList_mapList(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
variable "map" {
|
||||
type = map(string)
|
||||
default = {}
|
||||
}
|
||||
|
||||
resource "test_resource_list" "foo" {
|
||||
map_list = [
|
||||
{
|
||||
a = "1"
|
||||
},
|
||||
var.map
|
||||
]
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "map_list.1", "",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceList_sublist(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
sublist_block {
|
||||
string = "a"
|
||||
int = 1
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.sublist_block.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.sublist_block.0.string", "a",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.sublist_block.0.int", "1",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceList_interpolationChanges(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
string = "x"
|
||||
}
|
||||
}
|
||||
resource "test_resource_list" "bar" {
|
||||
list_block {
|
||||
string = test_resource_list.foo.id
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.string", "x",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.bar", "list_block.0.string", "testId",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "baz" {
|
||||
list_block {
|
||||
string = "x"
|
||||
int = 1
|
||||
}
|
||||
}
|
||||
resource "test_resource_list" "bar" {
|
||||
list_block {
|
||||
string = test_resource_list.baz.id
|
||||
int = 3
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.baz", "list_block.0.string", "x",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.bar", "list_block.0.string", "testId",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceList_removedForcesNew(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
force_new = "ok"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_list.foo", "list_block.0.force_new", "ok",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceList_emptyStrings(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
sublist = ["a", ""]
|
||||
}
|
||||
|
||||
list_block {
|
||||
sublist = [""]
|
||||
}
|
||||
|
||||
list_block {
|
||||
sublist = ["", "c", ""]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.0.sublist.0", "a"),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.0.sublist.1", ""),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.1.sublist.0", ""),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.2.sublist.0", ""),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.2.sublist.1", "c"),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.2.sublist.2", ""),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
list_block {
|
||||
sublist = [""]
|
||||
}
|
||||
|
||||
list_block {
|
||||
sublist = []
|
||||
}
|
||||
|
||||
list_block {
|
||||
sublist = ["", "c"]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.0.sublist.#", "1"),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.0.sublist.0", ""),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.1.sublist.#", "0"),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.2.sublist.1", "c"),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "list_block.2.sublist.#", "2"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceList_addRemove(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "computed_list.#", "0"),
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "dependent_list.#", "0"),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_list" "foo" {
|
||||
dependent_list {
|
||||
val = "a"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_list.foo", "computed_list.#", "1"),
|
||||
					resource.TestCheckResourceAttr("test_resource_list.foo", "dependent_list.#", "1"),
				),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "foo" {
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr("test_resource_list.foo", "computed_list.#", "0"),
					resource.TestCheckResourceAttr("test_resource_list.foo", "dependent_list.#", "0"),
				),
			},
		},
	})
}

func TestResourceList_planUnknownInterpolation(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "foo" {
	list_block {
		string = "x"
	}
}
resource "test_resource_list" "bar" {
	list_block {
		sublist = [
			test_resource_list.foo.list_block[0].string,
		]
	}
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						"test_resource_list.bar", "list_block.0.sublist.0", "x",
					),
				),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "foo" {
	list_block {
		string = "x"
	}
	dependent_list {
		val = "y"
	}
}
resource "test_resource_list" "bar" {
	list_block {
		sublist = [
			test_resource_list.foo.computed_list[0],
		]
	}
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						"test_resource_list.bar", "list_block.0.sublist.0", "y",
					),
				),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "foo" {
	list_block {
		string = "x"
	}
	dependent_list {
		val = "z"
	}
}
resource "test_resource_list" "bar" {
	list_block {
		sublist = [
			test_resource_list.foo.computed_list[0],
		]
	}
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						"test_resource_list.bar", "list_block.0.sublist.0", "z",
					),
				),
			},
		},
	})
}

func TestResourceList_planUnknownInterpolationList(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "foo" {
	dependent_list {
		val = "y"
	}
}
resource "test_resource_list" "bar" {
	list_block {
		sublist_block_optional {
			list = test_resource_list.foo.computed_list
		}
	}
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						"test_resource_list.bar", "list_block.0.sublist_block_optional.0.list.0", "y",
					),
				),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "foo" {
	dependent_list {
		val = "z"
	}
}
resource "test_resource_list" "bar" {
	list_block {
		sublist_block_optional {
			list = test_resource_list.foo.computed_list
		}
	}
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						"test_resource_list.bar", "list_block.0.sublist_block_optional.0.list.0", "z",
					),
				),
			},
		},
	})
}

func TestResourceList_dynamicList(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "a" {
	dependent_list {
		val = "a"
	}

	dependent_list {
		val = "b"
	}
}
resource "test_resource_list" "b" {
	list_block {
		string = "constant"
	}
	dynamic "list_block" {
		for_each = test_resource_list.a.computed_list
		content {
			string = list_block.value
		}
	}
}
`),
				Check: resource.ComposeTestCheckFunc(),
			},
		},
	})
}

func TestResourceList_dynamicMinItems(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
variable "a" {
	type = list(number)
	default = [1]
}

resource "test_resource_list" "b" {
	dynamic "min_items" {
		for_each = var.a
		content {
			val = "foo"
		}
	}
}
`),
				ExpectError: regexp.MustCompile(`attribute supports 2`),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "a" {
	dependent_list {
		val = "a"
	}

	dependent_list {
		val = "b"
	}
}
resource "test_resource_list" "b" {
	list_block {
		string = "constant"
	}
	dynamic "min_items" {
		for_each = test_resource_list.a.computed_list
		content {
			val = min_items.value
		}
	}
}
`),
			},
		},
	})
}

@@ -1,77 +0,0 @@
package test

import (
	"fmt"

	"github.com/hashicorp/terraform/configs/hcl2shim"
	"github.com/hashicorp/terraform/helper/schema"
)

func testResourceMap() *schema.Resource {
	return &schema.Resource{
		Create: testResourceMapCreate,
		Read:   testResourceMapRead,
		Update: testResourceMapUpdate,
		Delete: testResourceMapDelete,

		Schema: map[string]*schema.Schema{
			"name": {
				Type:     schema.TypeString,
				Required: true,
			},
			"map_of_three": {
				Type:     schema.TypeMap,
				Optional: true,
				Elem:     &schema.Schema{Type: schema.TypeString},
				ValidateFunc: func(v interface{}, _ string) ([]string, []error) {
					errs := []error{}
					for k, v := range v.(map[string]interface{}) {
						if v == hcl2shim.UnknownVariableValue {
							errs = append(errs, fmt.Errorf("unknown value in ValidateFunc: %q=%q", k, v))
						}
					}
					return nil, errs
				},
			},
			"map_values": {
				Type:     schema.TypeMap,
				Optional: true,
				Elem:     &schema.Schema{Type: schema.TypeString},
			},
			"computed_map": {
				Type:     schema.TypeMap,
				Computed: true,
				Elem:     &schema.Schema{Type: schema.TypeString},
			},
		},
	}
}

func testResourceMapCreate(d *schema.ResourceData, meta interface{}) error {
	// make sure all elements are passed to the map
	m := d.Get("map_of_three").(map[string]interface{})
	if len(m) != 3 {
		return fmt.Errorf("expected 3 map values, got %#v\n", m)
	}

	d.SetId("testId")
	return testResourceMapRead(d, meta)
}

func testResourceMapRead(d *schema.ResourceData, meta interface{}) error {
	var computedMap map[string]interface{}
	if v, ok := d.GetOk("map_values"); ok {
		computedMap = v.(map[string]interface{})
	}
	d.Set("computed_map", computedMap)
	return nil
}

func testResourceMapUpdate(d *schema.ResourceData, meta interface{}) error {
	return testResourceMapRead(d, meta)
}

func testResourceMapDelete(d *schema.ResourceData, meta interface{}) error {
	d.SetId("")
	return nil
}
|
@ -1,138 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestResourceMap_basic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: `
|
||||
resource "test_resource_map" "foobar" {
|
||||
name = "test"
|
||||
map_of_three = {
|
||||
one = "one"
|
||||
two = "two"
|
||||
empty = ""
|
||||
}
|
||||
}`,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_map.foobar", "map_of_three.empty", "",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceMap_basicWithVars(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: `
|
||||
variable "a" {
|
||||
default = "a"
|
||||
}
|
||||
|
||||
variable "b" {
|
||||
default = "b"
|
||||
}
|
||||
|
||||
resource "test_resource_map" "foobar" {
|
||||
name = "test"
|
||||
map_of_three = {
|
||||
one = var.a
|
||||
two = var.b
|
||||
empty = ""
|
||||
}
|
||||
}`,
|
||||
Check: resource.ComposeTestCheckFunc(),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceMap_computedMap(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: `
|
||||
resource "test_resource_map" "foobar" {
|
||||
name = "test"
|
||||
map_of_three = {
|
||||
one = "one"
|
||||
two = "two"
|
||||
empty = ""
|
||||
}
|
||||
map_values = {
|
||||
a = "1"
|
||||
b = "2"
|
||||
}
|
||||
}`,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_map.foobar", "computed_map.a", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_map.foobar", "computed_map.b", "2",
|
||||
),
|
||||
),
|
||||
},
|
||||
{
|
||||
Config: `
|
||||
resource "test_resource_map" "foobar" {
|
||||
name = "test"
|
||||
map_of_three = {
|
||||
one = "one"
|
||||
two = "two"
|
||||
empty = ""
|
||||
}
|
||||
map_values = {
|
||||
a = "3"
|
||||
b = "4"
|
||||
}
|
||||
}`,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_map.foobar", "computed_map.a", "3",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_map.foobar", "computed_map.b", "4",
|
||||
),
|
||||
),
|
||||
},
|
||||
{
|
||||
Config: `
|
||||
resource "test_resource_map" "foobar" {
|
||||
name = "test"
|
||||
map_of_three = {
|
||||
one = "one"
|
||||
two = "two"
|
||||
empty = ""
|
||||
}
|
||||
map_values = {
|
||||
a = "3"
|
||||
}
|
||||
}`,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_map.foobar", "computed_map.a", "3",
|
||||
),
|
||||
resource.TestCheckNoResourceAttr(
|
||||
"test_resource_map.foobar", "computed_map.b",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@@ -1,114 +0,0 @@
package test

import (
	"fmt"
	"math/rand"

	"github.com/hashicorp/terraform/helper/schema"
)

func testResourceNested() *schema.Resource {
	return &schema.Resource{
		Create: testResourceNestedCreate,
		Read:   testResourceNestedRead,
		Delete: testResourceNestedDelete,

		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"optional": {
				Type:     schema.TypeBool,
				Optional: true,
				ForceNew: true,
			},
			"nested": {
				Type:     schema.TypeSet,
				Optional: true,
				ForceNew: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"string": {
							Type:     schema.TypeString,
							Optional: true,
							ForceNew: true,
						},
						"optional": {
							Type:     schema.TypeBool,
							Optional: true,
							ForceNew: true,
						},
						"nested_again": {
							Type:     schema.TypeSet,
							Optional: true,
							ForceNew: true,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"string": {
										Type:     schema.TypeString,
										Optional: true,
										ForceNew: true,
									},
								},
							},
						},
					},
				},
			},
			"list_block": {
				Type:     schema.TypeList,
				Optional: true,
				Computed: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"sub_list_block": {
							Type:     schema.TypeList,
							Optional: true,
							MaxItems: 1,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"bool": {
										Type:     schema.TypeBool,
										Optional: true,
									},
									"set": {
										Type:     schema.TypeSet,
										Optional: true,
										Elem:     &schema.Schema{Type: schema.TypeString},
									},
								},
							},
						},
					},
				},
			},
		},
	}
}

func testResourceNestedCreate(d *schema.ResourceData, meta interface{}) error {
	d.SetId(fmt.Sprintf("%x", rand.Int63()))
	return testResourceNestedRead(d, meta)
}

func testResourceNestedUpdate(d *schema.ResourceData, meta interface{}) error {
	return testResourceNestedRead(d, meta)
}

func testResourceNestedRead(d *schema.ResourceData, meta interface{}) error {
	set := []map[string]interface{}{map[string]interface{}{
		"sub_list_block": []map[string]interface{}{map[string]interface{}{
			"bool": false,
			"set":  schema.NewSet(schema.HashString, nil),
		}},
	}}
	d.Set("list_block", set)
	return nil
}

func testResourceNestedDelete(d *schema.ResourceData, meta interface{}) error {
	d.SetId("")
	return nil
}
@@ -1,48 +0,0 @@
package test

import (
	"github.com/hashicorp/terraform/helper/schema"
)

func testResourceNestedId() *schema.Resource {
	return &schema.Resource{
		Create: testResourceNestedIdCreate,
		Read:   testResourceNestedIdRead,
		Update: testResourceNestedIdUpdate,
		Delete: testResourceNestedIdDelete,

		Schema: map[string]*schema.Schema{
			"list_block": {
				Type:     schema.TypeList,
				Optional: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"id": {
							Type:     schema.TypeString,
							Optional: true,
							Computed: true,
						},
					},
				},
			},
		},
	}
}

func testResourceNestedIdCreate(d *schema.ResourceData, meta interface{}) error {
	d.SetId("testId")
	return nil
}

func testResourceNestedIdRead(d *schema.ResourceData, meta interface{}) error {
	return nil
}

func testResourceNestedIdUpdate(d *schema.ResourceData, meta interface{}) error {
	return nil
}

func testResourceNestedIdDelete(d *schema.ResourceData, meta interface{}) error {
	d.SetId("")
	return nil
}
@@ -1,31 +0,0 @@
package test

import (
	"strings"
	"testing"

	"github.com/hashicorp/terraform/helper/resource"
)

func TestResourceNestedId_unknownId(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_nested_id" "foo" {
}
resource "test_resource_nested_id" "bar" {
	list_block {
		id = test_resource_nested_id.foo.id
	}
}
`),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr("test_resource_nested_id.bar", "list_block.0.id", "testId"),
				),
			},
		},
	})
}
@ -1,171 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceNestedSet() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceNestedSetCreate,
|
||||
Read: testResourceNestedSetRead,
|
||||
Delete: testResourceNestedSetDelete,
|
||||
Update: testResourceNestedSetUpdate,
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"optional": {
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"force_new": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
"type_list": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"value": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"single": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"value": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Required: true,
|
||||
},
|
||||
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
ForceNew: true,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"multi": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"set": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"required": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
"optional_int": {
|
||||
Type: schema.TypeInt,
|
||||
Optional: true,
|
||||
},
|
||||
"bool": {
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
// commenting this causes it to get missed during apply
|
||||
//ForceNew: true,
|
||||
Optional: true,
|
||||
},
|
||||
"bool": {
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"with_list": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"required": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"list": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
},
|
||||
},
|
||||
"list_block": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"unused": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func testResourceNestedSetCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
id := fmt.Sprintf("%x", rand.Int63())
|
||||
d.SetId(id)
|
||||
|
||||
// replicate some awkward handling of a computed value in a set
|
||||
set := d.Get("single").(*schema.Set)
|
||||
l := set.List()
|
||||
if len(l) == 1 {
|
||||
if s, ok := l[0].(map[string]interface{}); ok {
|
||||
if v, _ := s["optional"].(string); v == "" {
|
||||
s["optional"] = id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
d.Set("single", set)
|
||||
|
||||
return testResourceNestedSetRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceNestedSetRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceNestedSetDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceNestedSetUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
|
@ -1,653 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/addrs"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestResourceNestedSet_basic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_basicImport(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
resource.TestStep{
|
||||
ImportState: true,
|
||||
ResourceName: "test_resource_nested_set.foo",
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
}
|
||||
}
|
||||
`),
|
||||
ImportStateCheck: func(ss []*terraform.InstanceState) error {
|
||||
for _, s := range ss {
|
||||
if s.Attributes["multi.#"] != "0" ||
|
||||
s.Attributes["single.#"] != "0" ||
|
||||
s.Attributes["type_list.#"] != "0" ||
|
||||
s.Attributes["with_list.#"] != "0" {
|
||||
return fmt.Errorf("missing blocks in imported state:\n%s", s)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// The set should not be generated because of it's computed value
|
||||
func TestResourceNestedSet_noSet(t *testing.T) {
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_nested_set.foo"]
|
||||
for k, v := range res.Primary.Attributes {
|
||||
if strings.HasPrefix(k, "single") && k != "single.#" {
|
||||
return fmt.Errorf("unexpected set value: %s:%s", k, v)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// the empty type_list must be passed to the provider with 1 nil element
|
||||
func TestResourceNestedSet_emptyBlock(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
type_list {
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_nested_set.foo", "type_list.#", "1"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_emptyNestedListBlock(t *testing.T) {
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_nested_set.foo"]
|
||||
found := false
|
||||
for k := range res.Primary.Attributes {
|
||||
if !regexp.MustCompile(`^with_list\.\d+\.list_block\.`).MatchString(k) {
|
||||
continue
|
||||
}
|
||||
found = true
|
||||
}
|
||||
if !found {
|
||||
return fmt.Errorf("with_list.X.list_block not found")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
with_list {
|
||||
required = "ok"
|
||||
list_block {
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
func TestResourceNestedSet_emptyNestedList(t *testing.T) {
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_nested_set.foo"]
|
||||
found := false
|
||||
for k, v := range res.Primary.Attributes {
|
||||
if regexp.MustCompile(`^with_list\.\d+\.list\.#$`).MatchString(k) {
|
||||
found = true
|
||||
if v != "0" {
|
||||
return fmt.Errorf("expected empty list: %s, got %s", k, v)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
return fmt.Errorf("with_list.X.nested_list not found")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
with_list {
|
||||
required = "ok"
|
||||
list = []
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_addRemove(t *testing.T) {
|
||||
var id string
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_nested_set.foo"]
|
||||
if res.Primary.ID == id {
|
||||
return errors.New("expected new resource")
|
||||
}
|
||||
id = res.Primary.ID
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
checkFunc,
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.foo", "single.#", "1",
|
||||
),
|
||||
// the hash of single seems to change here, so we're not
|
||||
// going to test for "value" directly
|
||||
// FIXME: figure out why the set hash changes
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.foo", "single.#", "0",
|
||||
),
|
||||
checkFunc,
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
optional = "baz"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
func TestResourceNestedSet_multiAddRemove(t *testing.T) {
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
optional = "bar"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
set {
|
||||
required = "val"
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
set {
|
||||
required = "new"
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
set {
|
||||
required = "new"
|
||||
optional_int = 3
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "bar"
|
||||
optional = "baz"
|
||||
}
|
||||
multi {
|
||||
set {
|
||||
required = "new"
|
||||
optional_int = 3
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
optional = true
|
||||
single {
|
||||
value = "bar"
|
||||
optional = "baz"
|
||||
}
|
||||
multi {
|
||||
set {
|
||||
required = "new"
|
||||
optional_int = 3
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_forceNewEmptyString(t *testing.T) {
|
||||
var id string
|
||||
step := 0
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_nested_set.foo"]
|
||||
defer func() {
|
||||
step++
|
||||
id = res.Primary.ID
|
||||
}()
|
||||
|
||||
if step == 2 && res.Primary.ID == id {
|
||||
// setting an empty string currently does not trigger ForceNew, but
|
||||
// it should in the future.
|
||||
return nil
|
||||
}
|
||||
|
||||
if res.Primary.ID == id {
|
||||
return errors.New("expected new resource")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
set {
|
||||
required = "val"
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
set {
|
||||
required = ""
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
force_new = ""
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_setWithList(t *testing.T) {
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
with_list {
|
||||
required = "bar"
|
||||
list = ["initial value"]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
with_list {
|
||||
required = "bar"
|
||||
list = ["second value"]
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// This is the same as forceNewEmptyString, but we start with the empty value,
|
||||
// instead of changing it.
|
||||
func TestResourceNestedSet_nestedSetEmptyString(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
set {
|
||||
required = ""
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.foo", "multi.529860700.set.4196279896.required", "",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_emptySet(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
multi {
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.foo", "multi.#", "1",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_multipleUnknownSetElements(t *testing.T) {
|
||||
checkFunc := func(s *terraform.State) error {
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "a" {
|
||||
}
|
||||
|
||||
resource "test_resource_nested_set" "b" {
|
||||
}
|
||||
|
||||
resource "test_resource_nested_set" "c" {
|
||||
multi {
|
||||
optional = test_resource_nested_set.a.id
|
||||
}
|
||||
multi {
|
||||
optional = test_resource_nested_set.b.id
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: checkFunc,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_interpolationChanges(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
single {
|
||||
value = "x"
|
||||
}
|
||||
}
|
||||
resource "test_resource_nested_set" "bar" {
|
||||
single {
|
||||
value = test_resource_nested_set.foo.id
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.foo", "single.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.bar", "single.#", "1",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested_set" "baz" {
|
||||
single {
|
||||
value = "x"
|
||||
}
|
||||
}
|
||||
resource "test_resource_nested_set" "bar" {
|
||||
single {
|
||||
value = test_resource_nested_set.baz.id
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.baz", "single.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested_set.bar", "single.#", "1",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNestedSet_dynamicSetBlock(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource" "a" {
|
||||
required = "ok"
|
||||
required_map = {
|
||||
a = "b"
|
||||
}
|
||||
}
|
||||
|
||||
resource "test_resource_nested_set" "foo" {
|
||||
dynamic "with_list" {
|
||||
iterator = thing
|
||||
for_each = test_resource.a.computed_list
|
||||
content {
|
||||
required = thing.value
|
||||
list = [thing.key]
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,217 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/hashicorp/terraform/addrs"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestResourceNested_basic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
nested {
|
||||
string = "val"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.#", "1",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.1877647874.string", "val",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "list_block.0.sub_list_block.0.bool", "false",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNested_addRemove(t *testing.T) {
|
||||
var id string
|
||||
idCheck := func(s *terraform.State) error {
|
||||
root := s.ModuleByPath(addrs.RootModuleInstance)
|
||||
res := root.Resources["test_resource_nested.foo"]
|
||||
if res.Primary.ID == id {
|
||||
return errors.New("expected new resource")
|
||||
}
|
||||
id = res.Primary.ID
|
||||
return nil
|
||||
}
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
idCheck,
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.#", "0",
|
||||
),
|
||||
// Checking for a count of 0 and a nonexistent count should
|
||||
// now be the same operation.
|
||||
resource.TestCheckNoResourceAttr(
|
||||
"test_resource_nested.foo", "nested.#",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
nested {
|
||||
string = "val"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
idCheck,
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.1877647874.string", "val",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
optional = true
|
||||
nested {
|
||||
string = "val"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
idCheck,
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.1877647874.string", "val",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "optional", "true",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
nested {
|
||||
string = "val"
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
idCheck,
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.1877647874.string", "val",
|
||||
),
|
||||
resource.TestCheckNoResourceAttr(
|
||||
"test_resource_nested.foo", "optional",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
nested {
|
||||
string = "val"
|
||||
optional = true
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
idCheck,
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.2994502535.string", "val",
|
||||
),
|
||||
resource.TestCheckResourceAttr(
|
||||
"test_resource_nested.foo", "nested.2994502535.optional", "true",
|
||||
),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
}
|
||||
`),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
idCheck,
|
||||
resource.TestCheckNoResourceAttr(
|
||||
"test_resource_nested.foo", "nested.#",
|
||||
),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceNested_dynamic(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_nested" "foo" {
|
||||
dynamic "nested" {
|
||||
for_each = [["a"], []]
|
||||
content {
|
||||
string = join(",", nested.value)
|
||||
optional = false
|
||||
dynamic "nested_again" {
|
||||
for_each = nested.value
|
||||
content {
|
||||
string = nested_again.value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`),
|
||||
Check: func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources["test_resource_nested.foo"]
|
||||
if !ok {
|
||||
return errors.New("missing resource in state")
|
||||
}
|
||||
|
||||
got := rs.Primary.Attributes
|
||||
want := map[string]string{
|
||||
"nested.#": "2",
|
||||
"nested.33842314.string": "a",
|
||||
"nested.33842314.optional": "false",
|
||||
"nested.33842314.nested_again.#": "1",
|
||||
"nested.33842314.nested_again.936590934.string": "a",
|
||||
"nested.140280279.string": "",
|
||||
"nested.140280279.optional": "false",
|
||||
"nested.140280279.nested_again.#": "0",
|
||||
"list_block.#": "1",
|
||||
"list_block.0.sub_list_block.#": "1",
|
||||
"list_block.0.sub_list_block.0.bool": "false",
|
||||
"list_block.0.sub_list_block.0.set.#": "0",
|
||||
}
|
||||
delete(got, "id") // it's random, so not useful for testing
|
||||
|
||||
if !cmp.Equal(got, want) {
|
||||
return errors.New("wrong result\n" + cmp.Diff(want, got))
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@@ -1,95 +0,0 @@
package test

import (
	"fmt"

	"github.com/hashicorp/terraform/helper/schema"
)

func testResourceProviderMeta() *schema.Resource {
	return &schema.Resource{
		Create: testResourceProviderMetaCreate,
		Read:   testResourceProviderMetaRead,
		Update: testResourceProviderMetaUpdate,
		Delete: testResourceProviderMetaDelete,

		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"optional": {
				Type:     schema.TypeString,
				Optional: true,
			},
		},
	}
}

type providerMeta struct {
	Foo string `cty:"foo"`
}

func testResourceProviderMetaCreate(d *schema.ResourceData, meta interface{}) error {
	d.SetId("testId")
	var m providerMeta

	err := d.GetProviderMeta(&m)
	if err != nil {
		return err
	}

	if m.Foo != "bar" {
		return fmt.Errorf("expected provider_meta.foo to be %q, was %q",
			"bar", m.Foo)
	}

	return testResourceProviderMetaRead(d, meta)
}

func testResourceProviderMetaRead(d *schema.ResourceData, meta interface{}) error {
	var m providerMeta

	err := d.GetProviderMeta(&m)
	if err != nil {
		return err
	}

	if m.Foo != "bar" {
		return fmt.Errorf("expected provider_meta.foo to be %q, was %q",
			"bar", m.Foo)
	}

	return nil
}

func testResourceProviderMetaUpdate(d *schema.ResourceData, meta interface{}) error {
	var m providerMeta

	err := d.GetProviderMeta(&m)
	if err != nil {
		return err
	}

	if m.Foo != "bar" {
		return fmt.Errorf("expected provider_meta.foo to be %q, was %q",
			"bar", m.Foo)
	}
	return testResourceProviderMetaRead(d, meta)
}

func testResourceProviderMetaDelete(d *schema.ResourceData, meta interface{}) error {
	d.SetId("")
	var m providerMeta

	err := d.GetProviderMeta(&m)
	if err != nil {
		return err
	}

	if m.Foo != "bar" {
		return fmt.Errorf("expected provider_meta.foo to be %q, was %q",
			"bar", m.Foo)
	}
	return nil
}
@@ -1,29 +0,0 @@
package test

import (
	"strings"
	"testing"

	"github.com/hashicorp/terraform/helper/resource"
)

func TestResourceProviderMeta_basic(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
terraform {
	provider_meta "test" {
		foo = "bar"
	}
}

resource "test_resource_provider_meta" "foo" {
}
`),
			},
		},
	})
}
@@ -1,68 +0,0 @@
package test

import (
	"github.com/hashicorp/terraform/helper/schema"
)

func testResourceRequiredMin() *schema.Resource {
	return &schema.Resource{
		Create: testResourceRequiredMinCreate,
		Read:   testResourceRequiredMinRead,
		Update: testResourceRequiredMinUpdate,
		Delete: testResourceRequiredMinDelete,

		CustomizeDiff: func(d *schema.ResourceDiff, _ interface{}) error {
			if d.HasChange("dependent_list") {
				d.SetNewComputed("computed_list")
			}
			return nil
		},

		Schema: map[string]*schema.Schema{
			"min_items": {
				Type:     schema.TypeList,
				Optional: true,
				MinItems: 2,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"val": {
							Type:     schema.TypeString,
							Required: true,
						},
					},
				},
			},
			"required_min_items": {
				Type:     schema.TypeList,
				Required: true,
				MinItems: 2,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"val": {
							Type:     schema.TypeString,
							Required: true,
						},
					},
				},
			},
		},
	}
}

func testResourceRequiredMinCreate(d *schema.ResourceData, meta interface{}) error {
	d.SetId("testId")
	return testResourceRequiredMinRead(d, meta)
}

func testResourceRequiredMinRead(d *schema.ResourceData, meta interface{}) error {
	return nil
}

func testResourceRequiredMinUpdate(d *schema.ResourceData, meta interface{}) error {
	return testResourceRequiredMinRead(d, meta)
}

func testResourceRequiredMinDelete(d *schema.ResourceData, meta interface{}) error {
	d.SetId("")
	return nil
}
@@ -1,66 +0,0 @@
package test

import (
	"regexp"
	"strings"
	"testing"

	"github.com/hashicorp/terraform/helper/resource"
)

func TestResource_dynamicRequiredMinItems(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: `
resource "test_resource_required_min" "a" {
}
`,
				ExpectError: regexp.MustCompile(`"required_min_items" blocks are required`),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "a" {
	dependent_list {
		val = "a"
	}
}

resource "test_resource_required_min" "b" {
	dynamic "required_min_items" {
		for_each = test_resource_list.a.computed_list
		content {
			val = required_min_items.value
		}
	}
}
`),
				ExpectError: regexp.MustCompile(`required_min_items: attribute supports 2 item as a minimum`),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_list" "c" {
	dependent_list {
		val = "a"
	}

	dependent_list {
		val = "b"
	}
}

resource "test_resource_required_min" "b" {
	dynamic "required_min_items" {
		for_each = test_resource_list.c.computed_list
		content {
			val = required_min_items.value
		}
	}
}
`),
			},
		},
	})
}
@@ -1,43 +0,0 @@
package test

import (
	"github.com/hashicorp/terraform/helper/schema"
)

func testResourceSignal() *schema.Resource {
	return &schema.Resource{
		Create: testResourceSignalCreate,
		Read:   testResourceSignalRead,
		Update: testResourceSignalUpdate,
		Delete: testResourceSignalDelete,

		Importer: &schema.ResourceImporter{
			State: schema.ImportStatePassthrough,
		},

		Schema: map[string]*schema.Schema{
			"optional": {
				Type:     schema.TypeString,
				Optional: true,
			},
		},
	}
}

func testResourceSignalCreate(d *schema.ResourceData, meta interface{}) error {
	d.SetId("testId")

	return testResourceSignalRead(d, meta)
}

func testResourceSignalRead(d *schema.ResourceData, meta interface{}) error {
	return nil
}

func testResourceSignalUpdate(d *schema.ResourceData, meta interface{}) error {
	return testResourceSignalRead(d, meta)
}

func testResourceSignalDelete(d *schema.ResourceData, meta interface{}) error {
	return nil
}
@ -1,118 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/hashcode"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceStateFunc() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceStateFuncCreate,
|
||||
Read: testResourceStateFuncRead,
|
||||
Update: testResourceStateFuncUpdate,
|
||||
Delete: testResourceStateFuncDelete,
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"state_func": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
StateFunc: stateFuncHash,
|
||||
},
|
||||
"state_func_value": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
|
||||
// set block with computed elements
|
||||
"set_block": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Set: setBlockHash,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"required": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"optional": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func stateFuncHash(v interface{}) string {
|
||||
hash := sha1.Sum([]byte(v.(string)))
|
||||
return hex.EncodeToString(hash[:])
|
||||
}
|
||||
|
||||
func setBlockHash(v interface{}) int {
|
||||
m := v.(map[string]interface{})
|
||||
required, _ := m["required"].(string)
|
||||
optional, _ := m["optional"].(string)
|
||||
return hashcode.String(fmt.Sprintf("%s|%s", required, optional))
|
||||
}
|
||||
|
||||
func testResourceStateFuncCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId(fmt.Sprintf("%x", rand.Int63()))
|
||||
|
||||
// if we have a reference for the actual data in the state_func field,
|
||||
// compare it
|
||||
if data, ok := d.GetOk("state_func_value"); ok {
|
||||
expected := data.(string)
|
||||
got := d.Get("state_func").(string)
|
||||
if expected != got {
|
||||
return fmt.Errorf("expected state_func value:%q, got%q", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
// Check that we can lookup set elements by our computed hash.
|
||||
// This is not advised, but we can use this to make sure the final diff was
|
||||
// prepared with the correct values.
|
||||
setBlock, ok := d.GetOk("set_block")
|
||||
if ok {
|
||||
set := setBlock.(*schema.Set)
|
||||
for _, obj := range set.List() {
|
||||
idx := setBlockHash(obj)
|
||||
requiredAddr := fmt.Sprintf("%s.%d.%s", "set_block", idx, "required")
|
||||
_, ok := d.GetOkExists(requiredAddr)
|
||||
if !ok {
|
||||
return fmt.Errorf("failed to get attr %q from %#v", fmt.Sprintf(requiredAddr), d.State().Attributes)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return testResourceStateFuncRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceStateFuncRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceStateFuncUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceStateFuncDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@@ -1,85 +0,0 @@
package test

import (
	"strings"
	"testing"

	"github.com/hashicorp/terraform/helper/resource"
)

func TestResourceStateFunc_basic(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_state_func" "foo" {
}
`),
				Check: resource.TestCheckNoResourceAttr("test_resource_state_func.foo", "state_func"),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_state_func" "foo" {
	state_func = "data"
	state_func_value = "data"
}
`),
				Check: resource.TestCheckResourceAttr("test_resource_state_func.foo", "state_func", stateFuncHash("data")),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_state_func" "foo" {
}
`),
				Check: resource.TestCheckNoResourceAttr("test_resource_state_func.foo", "state_func"),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_state_func" "foo" {
	optional = "added"
	state_func = "data"
	state_func_value = "data"
}
`),
				Check: resource.TestCheckResourceAttr("test_resource_state_func.foo", "state_func", stateFuncHash("data")),
			},
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_state_func" "foo" {
	optional = "added"
	state_func = "changed"
	state_func_value = "changed"
}
`),
				Check: resource.TestCheckResourceAttr("test_resource_state_func.foo", "state_func", stateFuncHash("changed")),
			},
		},
	})
}

func TestResourceStateFunc_getOkSetElem(t *testing.T) {
	resource.UnitTest(t, resource.TestCase{
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckResourceDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: strings.TrimSpace(`
resource "test_resource_state_func" "foo" {
}

resource "test_resource_state_func" "bar" {
	set_block {
		required = "foo"
		optional = test_resource_state_func.foo.id
	}
	set_block {
		required = test_resource_state_func.foo.id
	}
}
`),
			},
		},
	})
}
File diff suppressed because it is too large
@ -1,125 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceTimeout() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceTimeoutCreate,
|
||||
Read: testResourceTimeoutRead,
|
||||
Update: testResourceTimeoutUpdate,
|
||||
Delete: testResourceTimeoutDelete,
|
||||
|
||||
// Due to the schema version also being stashed in the private/meta
|
||||
// data, we need to ensure that it does not overwrite the map
|
||||
// containing the timeouts.
|
||||
SchemaVersion: 1,
|
||||
|
||||
Timeouts: &schema.ResourceTimeout{
|
||||
Create: schema.DefaultTimeout(time.Second),
|
||||
Update: schema.DefaultTimeout(time.Second),
|
||||
Delete: schema.DefaultTimeout(time.Second),
|
||||
},
|
||||
|
||||
Importer: &schema.ResourceImporter{
|
||||
State: schema.ImportStatePassthrough,
|
||||
},
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"create_delay": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"read_delay": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"update_delay": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"delete_delay": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func testResourceTimeoutCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
delayString := d.Get("create_delay").(string)
|
||||
var delay time.Duration
|
||||
var err error
|
||||
if delayString != "" {
|
||||
delay, err = time.ParseDuration(delayString)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if delay > d.Timeout(schema.TimeoutCreate) {
|
||||
return fmt.Errorf("timeout while creating resource")
|
||||
}
|
||||
|
||||
d.SetId("testId")
|
||||
|
||||
return testResourceRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceTimeoutRead(d *schema.ResourceData, meta interface{}) error {
|
||||
delayString := d.Get("read_delay").(string)
|
||||
var delay time.Duration
|
||||
var err error
|
||||
if delayString != "" {
|
||||
delay, err = time.ParseDuration(delayString)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if delay > d.Timeout(schema.TimeoutRead) {
|
||||
return fmt.Errorf("timeout while reading resource")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceTimeoutUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
delayString := d.Get("update_delay").(string)
|
||||
var delay time.Duration
|
||||
var err error
|
||||
if delayString != "" {
|
||||
delay, err = time.ParseDuration(delayString)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if delay > d.Timeout(schema.TimeoutUpdate) {
|
||||
return fmt.Errorf("timeout while updating resource")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceTimeoutDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
delayString := d.Get("delete_delay").(string)
|
||||
var delay time.Duration
|
||||
var err error
|
||||
if delayString != "" {
|
||||
delay, err = time.ParseDuration(delayString)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if delay > d.Timeout(schema.TimeoutDelete) {
|
||||
return fmt.Errorf("timeout while deleting resource")
|
||||
}
|
||||
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,158 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestResourceTimeout_create(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
create_delay = "2s"
|
||||
timeouts {
|
||||
create = "1s"
|
||||
}
|
||||
}
|
||||
`),
|
||||
ExpectError: regexp.MustCompile("timeout while creating resource"),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// start with the default, then modify it
|
||||
func TestResourceTimeout_defaults(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
update_delay = "1ms"
|
||||
}
|
||||
`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
update_delay = "2ms"
|
||||
timeouts {
|
||||
update = "3s"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
update_delay = "2s"
|
||||
delete_delay = "2s"
|
||||
timeouts {
|
||||
delete = "3s"
|
||||
update = "3s"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
// delete "foo"
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "bar" {
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceTimeout_delete(t *testing.T) {
|
||||
// If the delete timeout isn't saved until destroy, the cleanup here will
|
||||
// fail because the default is only 20m.
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
delete_delay = "25m"
|
||||
timeouts {
|
||||
delete = "30m"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
func TestResourceTimeout_update(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
update_delay = "1s"
|
||||
timeouts {
|
||||
update = "1s"
|
||||
}
|
||||
}
|
||||
`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
update_delay = "2s"
|
||||
timeouts {
|
||||
update = "1s"
|
||||
}
|
||||
}
|
||||
`),
|
||||
ExpectError: regexp.MustCompile("timeout while updating resource"),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceTimeout_read(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckResourceDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
}
|
||||
`),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
read_delay = "30m"
|
||||
}
|
||||
`),
|
||||
ExpectError: regexp.MustCompile("timeout while reading resource"),
|
||||
},
|
||||
// We need to remove the read_delay so that the resource can be
|
||||
// destroyed in the final step, but expect an error here from the
|
||||
// pre-existing delay.
|
||||
resource.TestStep{
|
||||
Config: strings.TrimSpace(`
|
||||
resource "test_resource_timeout" "foo" {
|
||||
}
|
||||
`),
|
||||
ExpectError: regexp.MustCompile("timeout while reading resource"),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceUndeleteable() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceUndeleteableCreate,
|
||||
Read: testResourceUndeleteableRead,
|
||||
Delete: testResourceUndeleteableDelete,
|
||||
|
||||
Schema: map[string]*schema.Schema{},
|
||||
}
|
||||
}
|
||||
|
||||
func testResourceUndeleteableCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("placeholder")
|
||||
return testResourceUndeleteableRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceUndeleteableRead(d *schema.ResourceData, meta interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceUndeleteableDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
return fmt.Errorf("test_undeleteable always fails deletion (use terraform state rm if you really want to delete it)")
|
||||
}
|
|
@ -1,154 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func testResourceCustomDiff() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Create: testResourceCustomDiffCreate,
|
||||
Read: testResourceCustomDiffRead,
|
||||
CustomizeDiff: testResourceCustomDiffCustomizeDiff,
|
||||
Update: testResourceCustomDiffUpdate,
|
||||
Delete: testResourceCustomDiffDelete,
|
||||
Schema: map[string]*schema.Schema{
|
||||
"required": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"computed": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"index": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"veto": {
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"list": {
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type listDiffCases struct {
|
||||
Type string
|
||||
Value string
|
||||
}
|
||||
|
||||
func testListDiffCases(index int) []listDiffCases {
|
||||
switch index {
|
||||
case 0:
|
||||
return []listDiffCases{
|
||||
{
|
||||
Type: "add",
|
||||
Value: "dc1",
|
||||
},
|
||||
}
|
||||
case 1:
|
||||
return []listDiffCases{
|
||||
{
|
||||
Type: "remove",
|
||||
Value: "dc1",
|
||||
},
|
||||
{
|
||||
Type: "add",
|
||||
Value: "dc2",
|
||||
},
|
||||
{
|
||||
Type: "add",
|
||||
Value: "dc3",
|
||||
},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func testListDiffCasesReadResult(index int) []interface{} {
|
||||
switch index {
|
||||
case 1:
|
||||
return []interface{}{"dc1"}
|
||||
default:
|
||||
return []interface{}{"dc2", "dc3"}
|
||||
}
|
||||
}
|
||||
|
||||
func testResourceCustomDiffCreate(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("testId")
|
||||
|
||||
// Required must make it through to Create
|
||||
if _, ok := d.GetOk("required"); !ok {
|
||||
return fmt.Errorf("missing attribute 'required', but it's required")
|
||||
}
|
||||
|
||||
_, new := d.GetChange("computed")
|
||||
expected := new.(int) - 1
|
||||
actual := d.Get("index").(int)
|
||||
if expected != actual {
|
||||
return fmt.Errorf("expected computed to be 1 ahead of index, got computed: %d, index: %d", expected, actual)
|
||||
}
|
||||
d.Set("index", new)
|
||||
|
||||
return testResourceCustomDiffRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceCustomDiffRead(d *schema.ResourceData, meta interface{}) error {
|
||||
if err := d.Set("list", testListDiffCasesReadResult(d.Get("index").(int))); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceCustomDiffCustomizeDiff(d *schema.ResourceDiff, meta interface{}) error {
|
||||
if d.Get("veto").(bool) == true {
|
||||
return fmt.Errorf("veto is true, diff vetoed")
|
||||
}
|
||||
// Note that this gets put into state after the update, regardless of whether
|
||||
// or not anything is acted upon in the diff.
|
||||
d.SetNew("computed", d.Get("computed").(int)+1)
|
||||
|
||||
// This tests a diffed list, based off of the value of index
|
||||
dcs := testListDiffCases(d.Get("index").(int))
|
||||
s := d.Get("list").([]interface{})
|
||||
for _, dc := range dcs {
|
||||
switch dc.Type {
|
||||
case "add":
|
||||
s = append(s, dc.Value)
|
||||
case "remove":
|
||||
for i := range s {
|
||||
if s[i].(string) == dc.Value {
|
||||
copy(s[i:], s[i+1:])
|
||||
s = s[:len(s)-1]
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
d.SetNew("list", s)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func testResourceCustomDiffUpdate(d *schema.ResourceData, meta interface{}) error {
|
||||
_, new := d.GetChange("computed")
|
||||
expected := new.(int) - 1
|
||||
actual := d.Get("index").(int)
|
||||
if expected != actual {
|
||||
return fmt.Errorf("expected computed to be 1 ahead of index, got computed: %d, index: %d", expected, actual)
|
||||
}
|
||||
d.Set("index", new)
|
||||
return testResourceCustomDiffRead(d, meta)
|
||||
}
|
||||
|
||||
func testResourceCustomDiffDelete(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId("")
|
||||
return nil
|
||||
}
|
|
@ -1,53 +0,0 @@
|
|||
package test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
// TestResourceWithCustomDiff tests custom diff behaviour.
|
||||
func TestResourceWithCustomDiff(t *testing.T) {
|
||||
resource.UnitTest(t, resource.TestCase{
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: resourceWithCustomDiffConfig(false),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "computed", "1"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "index", "1"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "list.#", "1"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "list.0", "dc1"),
|
||||
),
|
||||
ExpectNonEmptyPlan: true,
|
||||
},
|
||||
{
|
||||
Config: resourceWithCustomDiffConfig(false),
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "computed", "2"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "index", "2"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "list.#", "2"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "list.0", "dc2"),
|
||||
resource.TestCheckResourceAttr("test_resource_with_custom_diff.foo", "list.1", "dc3"),
|
||||
resource.TestCheckNoResourceAttr("test_resource_with_custom_diff.foo", "list.2"),
|
||||
),
|
||||
ExpectNonEmptyPlan: true,
|
||||
},
|
||||
{
|
||||
Config: resourceWithCustomDiffConfig(true),
|
||||
ExpectError: regexp.MustCompile("veto is true, diff vetoed"),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func resourceWithCustomDiffConfig(veto bool) string {
|
||||
return fmt.Sprintf(`
|
||||
resource "test_resource_with_custom_diff" "foo" {
|
||||
required = "yep"
|
||||
veto = %t
|
||||
}
|
||||
`, veto)
|
||||
}
|
|
@ -1,115 +0,0 @@
|
|||
package chef
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/terraform/communicator"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
const (
|
||||
chmod = "find %s -maxdepth 1 -type f -exec /bin/chmod %d {} +"
|
||||
installURL = "https://omnitruck.chef.io/install.sh"
|
||||
)
|
||||
|
||||
func (p *provisioner) linuxInstallChefClient(o terraform.UIOutput, comm communicator.Communicator) error {
|
||||
// Build up the command prefix
|
||||
prefix := ""
|
||||
if p.HTTPProxy != "" {
|
||||
prefix += fmt.Sprintf("http_proxy='%s' ", p.HTTPProxy)
|
||||
}
|
||||
if p.HTTPSProxy != "" {
|
||||
prefix += fmt.Sprintf("https_proxy='%s' ", p.HTTPSProxy)
|
||||
}
|
||||
if len(p.NOProxy) > 0 {
|
||||
prefix += fmt.Sprintf("no_proxy='%s' ", strings.Join(p.NOProxy, ","))
|
||||
}
|
||||
|
||||
// First download the install.sh script from Chef
|
||||
err := p.runCommand(o, comm, fmt.Sprintf("%scurl -LO %s", prefix, installURL))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Then execute the install.sh script to download and install Chef Client
|
||||
err = p.runCommand(o, comm, fmt.Sprintf("%sbash ./install.sh -v %q -c %s", prefix, p.Version, p.Channel))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// And finally clean up the install.sh script again
|
||||
return p.runCommand(o, comm, fmt.Sprintf("%srm -f install.sh", prefix))
|
||||
}
|
||||
|
||||
func (p *provisioner) linuxCreateConfigFiles(o terraform.UIOutput, comm communicator.Communicator) error {
|
||||
// Make sure the config directory exists
|
||||
if err := p.runCommand(o, comm, "mkdir -p "+linuxConfDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Make sure we have enough rights to upload the files if using sudo
|
||||
if p.useSudo {
|
||||
if err := p.runCommand(o, comm, "chmod 777 "+linuxConfDir); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.runCommand(o, comm, fmt.Sprintf(chmod, linuxConfDir, 666)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := p.deployConfigFiles(o, comm, linuxConfDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(p.OhaiHints) > 0 {
|
||||
// Make sure the hints directory exists
|
||||
hintsDir := path.Join(linuxConfDir, "ohai/hints")
|
||||
if err := p.runCommand(o, comm, "mkdir -p "+hintsDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Make sure we have enough rights to upload the hints if using sudo
|
||||
if p.useSudo {
|
||||
if err := p.runCommand(o, comm, "chmod 777 "+hintsDir); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.runCommand(o, comm, fmt.Sprintf(chmod, hintsDir, 666)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := p.deployOhaiHints(o, comm, hintsDir); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// When done copying the hints, restore the rights and make sure root is the owner
|
||||
if p.useSudo {
|
||||
if err := p.runCommand(o, comm, "chmod 755 "+hintsDir); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.runCommand(o, comm, fmt.Sprintf(chmod, hintsDir, 600)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.runCommand(o, comm, "chown -R root:root "+hintsDir); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// When done copying all files, restore the rights and make sure root is the owner
|
||||
if p.useSudo {
|
||||
if err := p.runCommand(o, comm, "chmod 755 "+linuxConfDir); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.runCommand(o, comm, fmt.Sprintf(chmod, linuxConfDir, 600)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.runCommand(o, comm, "chown -R root:root "+linuxConfDir); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -1,330 +0,0 @@
|
|||
package chef
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/communicator"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestResourceProvider_linuxInstallChefClient(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
Config map[string]interface{}
|
||||
Commands map[string]bool
|
||||
}{
|
||||
"Sudo": {
|
||||
Config: map[string]interface{}{
|
||||
"node_name": "nodename1",
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"sudo curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"sudo bash ./install.sh -v \"\" -c stable": true,
|
||||
"sudo rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
|
||||
"NoSudo": {
|
||||
Config: map[string]interface{}{
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"secret_key": "SECRET-KEY",
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"bash ./install.sh -v \"\" -c stable": true,
|
||||
"rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
|
||||
"HTTPProxy": {
|
||||
Config: map[string]interface{}{
|
||||
"http_proxy": "http://proxy.local",
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"http_proxy='http://proxy.local' curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"http_proxy='http://proxy.local' bash ./install.sh -v \"\" -c stable": true,
|
||||
"http_proxy='http://proxy.local' rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
|
||||
"HTTPSProxy": {
|
||||
Config: map[string]interface{}{
|
||||
"https_proxy": "https://proxy.local",
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"https_proxy='https://proxy.local' curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"https_proxy='https://proxy.local' bash ./install.sh -v \"\" -c stable": true,
|
||||
"https_proxy='https://proxy.local' rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
|
||||
"NoProxy": {
|
||||
Config: map[string]interface{}{
|
||||
"http_proxy": "http://proxy.local",
|
||||
"no_proxy": []interface{}{"http://local.local", "http://local.org"},
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"http_proxy='http://proxy.local' no_proxy='http://local.local,http://local.org' " +
|
||||
"curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"http_proxy='http://proxy.local' no_proxy='http://local.local,http://local.org' " +
|
||||
"bash ./install.sh -v \"\" -c stable": true,
|
||||
"http_proxy='http://proxy.local' no_proxy='http://local.local,http://local.org' " +
|
||||
"rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
|
||||
"Version": {
|
||||
Config: map[string]interface{}{
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
"version": "11.18.6",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"bash ./install.sh -v \"11.18.6\" -c stable": true,
|
||||
"rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
|
||||
"Channel": {
|
||||
Config: map[string]interface{}{
|
||||
"channel": "current",
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
"version": "11.18.6",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"curl -LO https://omnitruck.chef.io/install.sh": true,
|
||||
"bash ./install.sh -v \"11.18.6\" -c current": true,
|
||||
"rm -f install.sh": true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
o := new(terraform.MockUIOutput)
|
||||
c := new(communicator.MockCommunicator)
|
||||
|
||||
for k, tc := range cases {
|
||||
c.Commands = tc.Commands
|
||||
|
||||
p, err := decodeConfig(
|
||||
schema.TestResourceDataRaw(t, Provisioner().(*schema.Provisioner).Schema, tc.Config),
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("Error: %v", err)
|
||||
}
|
||||
|
||||
p.useSudo = !p.PreventSudo
|
||||
|
||||
err = p.linuxInstallChefClient(o, c)
|
||||
if err != nil {
|
||||
t.Fatalf("Test %q failed: %v", k, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestResourceProvider_linuxCreateConfigFiles(t *testing.T) {
|
||||
cases := map[string]struct {
|
||||
Config map[string]interface{}
|
||||
Commands map[string]bool
|
||||
Uploads map[string]string
|
||||
}{
|
||||
"Sudo": {
|
||||
Config: map[string]interface{}{
|
||||
"ohai_hints": []interface{}{"testdata/ohaihint.json"},
|
||||
"node_name": "nodename1",
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"secret_key": "SECRET-KEY",
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"sudo mkdir -p " + linuxConfDir: true,
|
||||
"sudo chmod 777 " + linuxConfDir: true,
|
||||
"sudo " + fmt.Sprintf(chmod, linuxConfDir, 666): true,
|
||||
"sudo mkdir -p " + path.Join(linuxConfDir, "ohai/hints"): true,
|
||||
"sudo chmod 777 " + path.Join(linuxConfDir, "ohai/hints"): true,
|
||||
"sudo " + fmt.Sprintf(chmod, path.Join(linuxConfDir, "ohai/hints"), 666): true,
|
||||
"sudo chmod 755 " + path.Join(linuxConfDir, "ohai/hints"): true,
|
||||
"sudo " + fmt.Sprintf(chmod, path.Join(linuxConfDir, "ohai/hints"), 600): true,
|
||||
"sudo chown -R root:root " + path.Join(linuxConfDir, "ohai/hints"): true,
|
||||
"sudo chmod 755 " + linuxConfDir: true,
|
||||
"sudo " + fmt.Sprintf(chmod, linuxConfDir, 600): true,
|
||||
"sudo chown -R root:root " + linuxConfDir: true,
|
||||
},
|
||||
|
||||
Uploads: map[string]string{
|
||||
linuxConfDir + "/client.rb": defaultLinuxClientConf,
|
||||
linuxConfDir + "/encrypted_data_bag_secret": "SECRET-KEY",
|
||||
linuxConfDir + "/first-boot.json": `{"run_list":["cookbook::recipe"]}`,
|
||||
linuxConfDir + "/ohai/hints/ohaihint.json": "OHAI-HINT-FILE",
|
||||
linuxConfDir + "/bob.pem": "USER-KEY",
|
||||
},
|
||||
},
|
||||
|
||||
"NoSudo": {
|
||||
Config: map[string]interface{}{
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"secret_key": "SECRET-KEY",
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"mkdir -p " + linuxConfDir: true,
|
||||
},
|
||||
|
||||
Uploads: map[string]string{
|
||||
linuxConfDir + "/client.rb": defaultLinuxClientConf,
|
||||
linuxConfDir + "/encrypted_data_bag_secret": "SECRET-KEY",
|
||||
linuxConfDir + "/first-boot.json": `{"run_list":["cookbook::recipe"]}`,
|
||||
linuxConfDir + "/bob.pem": "USER-KEY",
|
||||
},
|
||||
},
|
||||
|
||||
"Proxy": {
|
||||
Config: map[string]interface{}{
|
||||
"http_proxy": "http://proxy.local",
|
||||
"https_proxy": "https://proxy.local",
|
||||
"no_proxy": []interface{}{"http://local.local", "https://local.local"},
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"secret_key": "SECRET-KEY",
|
||||
"server_url": "https://chef.local",
|
||||
"ssl_verify_mode": "verify_none",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"mkdir -p " + linuxConfDir: true,
|
||||
},
|
||||
|
||||
Uploads: map[string]string{
|
||||
linuxConfDir + "/client.rb": proxyLinuxClientConf,
|
||||
linuxConfDir + "/encrypted_data_bag_secret": "SECRET-KEY",
|
||||
linuxConfDir + "/first-boot.json": `{"run_list":["cookbook::recipe"]}`,
|
||||
linuxConfDir + "/bob.pem": "USER-KEY",
|
||||
},
|
||||
},
|
||||
|
||||
"Attributes JSON": {
|
||||
Config: map[string]interface{}{
|
||||
"attributes_json": `{"key1":{"subkey1":{"subkey2a":["val1","val2","val3"],` +
|
||||
`"subkey2b":{"subkey3":"value3"}}},"key2":"value2"}`,
|
||||
"node_name": "nodename1",
|
||||
"prevent_sudo": true,
|
||||
"run_list": []interface{}{"cookbook::recipe"},
|
||||
"secret_key": "SECRET-KEY",
|
||||
"server_url": "https://chef.local",
|
||||
"user_name": "bob",
|
||||
"user_key": "USER-KEY",
|
||||
},
|
||||
|
||||
Commands: map[string]bool{
|
||||
"mkdir -p " + linuxConfDir: true,
|
||||
},
|
||||
|
||||
Uploads: map[string]string{
|
||||
linuxConfDir + "/client.rb": defaultLinuxClientConf,
|
||||
linuxConfDir + "/encrypted_data_bag_secret": "SECRET-KEY",
|
||||
linuxConfDir + "/bob.pem": "USER-KEY",
|
||||
linuxConfDir + "/first-boot.json": `{"key1":{"subkey1":{"subkey2a":["val1","val2","val3"],` +
|
||||
`"subkey2b":{"subkey3":"value3"}}},"key2":"value2","run_list":["cookbook::recipe"]}`,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
o := new(terraform.MockUIOutput)
|
||||
c := new(communicator.MockCommunicator)
|
||||
|
||||
for k, tc := range cases {
|
||||
c.Commands = tc.Commands
|
||||
c.Uploads = tc.Uploads
|
||||
|
||||
p, err := decodeConfig(
|
||||
schema.TestResourceDataRaw(t, Provisioner().(*schema.Provisioner).Schema, tc.Config),
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("Error: %v", err)
|
||||
}
|
||||
|
||||
p.useSudo = !p.PreventSudo
|
||||
|
||||
err = p.linuxCreateConfigFiles(o, c)
|
||||
if err != nil {
|
||||
t.Fatalf("Test %q failed: %v", k, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const defaultLinuxClientConf = `log_location STDOUT
|
||||
chef_server_url "https://chef.local/"
|
||||
node_name "nodename1"`
|
||||
|
||||
const proxyLinuxClientConf = `log_location STDOUT
|
||||
chef_server_url "https://chef.local/"
|
||||
node_name "nodename1"
|
||||
|
||||
http_proxy "http://proxy.local"
|
||||
ENV['http_proxy'] = "http://proxy.local"
|
||||
ENV['HTTP_PROXY'] = "http://proxy.local"
|
||||
|
||||
https_proxy "https://proxy.local"
|
||||
ENV['https_proxy'] = "https://proxy.local"
|
||||
ENV['HTTPS_PROXY'] = "https://proxy.local"
|
||||
|
||||
no_proxy "http://local.local,https://local.local"
|
||||
ENV['no_proxy'] = "http://local.local,https://local.local"
|
||||
|
||||
ssl_verify_mode :verify_none`
|
|
@ -1,904 +0,0 @@
|
|||
package chef
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/communicator"
|
||||
"github.com/hashicorp/terraform/communicator/remote"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
"github.com/mitchellh/go-homedir"
|
||||
"github.com/mitchellh/go-linereader"
|
||||
)
|
||||
|
||||
const (
|
||||
clienrb = "client.rb"
|
||||
defaultEnv = "_default"
|
||||
firstBoot = "first-boot.json"
|
||||
logfileDir = "logfiles"
|
||||
linuxChefCmd = "chef-client"
|
||||
linuxConfDir = "/etc/chef"
|
||||
linuxNoOutput = "> /dev/null 2>&1"
|
||||
linuxGemCmd = "/opt/chef/embedded/bin/gem"
|
||||
linuxKnifeCmd = "knife"
|
||||
secretKey = "encrypted_data_bag_secret"
|
||||
windowsChefCmd = "cmd /c chef-client"
|
||||
windowsConfDir = "C:/chef"
|
||||
windowsNoOutput = "> nul 2>&1"
|
||||
windowsGemCmd = "C:/opscode/chef/embedded/bin/gem"
|
||||
windowsKnifeCmd = "cmd /c knife"
|
||||
)
|
||||
|
||||
const clientConf = `
|
||||
log_location STDOUT
|
||||
chef_server_url "{{ .ServerURL }}"
|
||||
node_name "{{ .NodeName }}"
|
||||
{{ if .UsePolicyfile }}
|
||||
use_policyfile true
|
||||
policy_group "{{ .PolicyGroup }}"
|
||||
policy_name "{{ .PolicyName }}"
|
||||
{{ end -}}
|
||||
|
||||
{{ if .HTTPProxy }}
|
||||
http_proxy "{{ .HTTPProxy }}"
|
||||
ENV['http_proxy'] = "{{ .HTTPProxy }}"
|
||||
ENV['HTTP_PROXY'] = "{{ .HTTPProxy }}"
|
||||
{{ end -}}
|
||||
|
||||
{{ if .HTTPSProxy }}
|
||||
https_proxy "{{ .HTTPSProxy }}"
|
||||
ENV['https_proxy'] = "{{ .HTTPSProxy }}"
|
||||
ENV['HTTPS_PROXY'] = "{{ .HTTPSProxy }}"
|
||||
{{ end -}}
|
||||
|
||||
{{ if .NOProxy }}
|
||||
no_proxy "{{ join .NOProxy "," }}"
|
||||
ENV['no_proxy'] = "{{ join .NOProxy "," }}"
|
||||
{{ end -}}
|
||||
|
||||
{{ if .SSLVerifyMode }}
|
||||
ssl_verify_mode {{ .SSLVerifyMode }}
|
||||
{{- end -}}
|
||||
|
||||
{{ if .DisableReporting }}
|
||||
enable_reporting false
|
||||
{{ end -}}
|
||||
|
||||
{{ if .ClientOptions }}
|
||||
{{ join .ClientOptions "\n" }}
|
||||
{{ end }}
|
||||
`
|
||||
|
||||
type provisionFn func(terraform.UIOutput, communicator.Communicator) error
|
||||
|
||||
type provisioner struct {
|
||||
Attributes map[string]interface{}
|
||||
Channel string
|
||||
ClientOptions []string
|
||||
DisableReporting bool
|
||||
Environment string
|
||||
FetchChefCertificates bool
|
||||
LogToFile bool
|
||||
UsePolicyfile bool
|
||||
PolicyGroup string
|
||||
PolicyName string
|
||||
HTTPProxy string
|
||||
HTTPSProxy string
|
||||
MaxRetries int
|
||||
NamedRunList string
|
||||
NOProxy []string
|
||||
NodeName string
|
||||
OhaiHints []string
|
||||
OSType string
|
||||
RecreateClient bool
|
||||
PreventSudo bool
|
||||
RetryOnExitCode map[int]bool
|
||||
RunList []string
|
||||
SecretKey string
|
||||
ServerURL string
|
||||
SkipInstall bool
|
||||
SkipRegister bool
|
||||
SSLVerifyMode string
|
||||
UserName string
|
||||
UserKey string
|
||||
Vaults map[string][]string
|
||||
Version string
|
||||
WaitForRetry time.Duration
|
||||
|
||||
cleanupUserKeyCmd string
|
||||
createConfigFiles provisionFn
|
||||
installChefClient provisionFn
|
||||
fetchChefCertificates provisionFn
|
||||
generateClientKey provisionFn
|
||||
configureVaults provisionFn
|
||||
runChefClient provisionFn
|
||||
useSudo bool
|
||||
}
|
||||
|
||||
// Provisioner returns a Chef provisioner
|
||||
func Provisioner() terraform.ResourceProvisioner {
|
||||
return &schema.Provisioner{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"node_name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"server_url": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"user_name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"user_key": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
|
||||
"attributes_json": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"channel": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: "stable",
|
||||
},
|
||||
"client_options": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Optional: true,
|
||||
},
|
||||
"disable_reporting": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"environment": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: defaultEnv,
|
||||
},
|
||||
"fetch_chef_certificates": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"log_to_file": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"use_policyfile": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"policy_group": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"policy_name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"http_proxy": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"https_proxy": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"max_retries": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Optional: true,
|
||||
Default: 0,
|
||||
},
|
||||
"no_proxy": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Optional: true,
|
||||
},
|
||||
"named_run_list": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"ohai_hints": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Optional: true,
|
||||
},
|
||||
"os_type": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"prevent_sudo": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"recreate_client": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"retry_on_exit_code": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Elem: &schema.Schema{Type: schema.TypeInt},
|
||||
Optional: true,
|
||||
},
|
||||
"run_list": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Optional: true,
|
||||
},
|
||||
"secret_key": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"skip_install": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"skip_register": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
},
|
||||
"ssl_verify_mode": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"vault_json": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"version": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"wait_for_retry": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Optional: true,
|
||||
Default: 30,
|
||||
},
|
||||
},
|
||||
|
||||
ApplyFunc: applyFn,
|
||||
ValidateFunc: validateFn,
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Support context cancelling (Provisioner Stop)
|
||||
func applyFn(ctx context.Context) error {
|
||||
o := ctx.Value(schema.ProvOutputKey).(terraform.UIOutput)
|
||||
s := ctx.Value(schema.ProvRawStateKey).(*terraform.InstanceState)
|
||||
d := ctx.Value(schema.ProvConfigDataKey).(*schema.ResourceData)
|
||||
|
||||
// Decode the provisioner config
|
||||
p, err := decodeConfig(d)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if p.OSType == "" {
|
||||
switch t := s.Ephemeral.ConnInfo["type"]; t {
|
||||
case "ssh", "": // The default connection type is ssh, so if the type is empty assume ssh
|
||||
p.OSType = "linux"
|
||||
case "winrm":
|
||||
p.OSType = "windows"
|
||||
default:
|
||||
return fmt.Errorf("Unsupported connection type: %s", t)
|
||||
}
|
||||
}
|
||||
|
||||
// Set some values based on the targeted OS
|
||||
switch p.OSType {
|
||||
case "linux":
|
||||
p.cleanupUserKeyCmd = fmt.Sprintf("rm -f %s", path.Join(linuxConfDir, p.UserName+".pem"))
|
||||
p.createConfigFiles = p.linuxCreateConfigFiles
|
||||
p.installChefClient = p.linuxInstallChefClient
|
||||
p.fetchChefCertificates = p.fetchChefCertificatesFunc(linuxKnifeCmd, linuxConfDir)
|
||||
p.generateClientKey = p.generateClientKeyFunc(linuxKnifeCmd, linuxConfDir, linuxNoOutput)
|
||||
p.configureVaults = p.configureVaultsFunc(linuxGemCmd, linuxKnifeCmd, linuxConfDir)
|
||||
p.runChefClient = p.runChefClientFunc(linuxChefCmd, linuxConfDir)
|
||||
p.useSudo = !p.PreventSudo && s.Ephemeral.ConnInfo["user"] != "root"
|
||||
case "windows":
|
||||
p.cleanupUserKeyCmd = fmt.Sprintf("cd %s && del /F /Q %s", windowsConfDir, p.UserName+".pem")
|
||||
p.createConfigFiles = p.windowsCreateConfigFiles
|
||||
p.installChefClient = p.windowsInstallChefClient
|
||||
p.fetchChefCertificates = p.fetchChefCertificatesFunc(windowsKnifeCmd, windowsConfDir)
|
||||
p.generateClientKey = p.generateClientKeyFunc(windowsKnifeCmd, windowsConfDir, windowsNoOutput)
|
||||
p.configureVaults = p.configureVaultsFunc(windowsGemCmd, windowsKnifeCmd, windowsConfDir)
|
||||
p.runChefClient = p.runChefClientFunc(windowsChefCmd, windowsConfDir)
|
||||
p.useSudo = false
|
||||
default:
|
||||
return fmt.Errorf("Unsupported os type: %s", p.OSType)
|
||||
}
|
||||
|
||||
// Get a new communicator
|
||||
comm, err := communicator.New(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
retryCtx, cancel := context.WithTimeout(ctx, comm.Timeout())
|
||||
defer cancel()
|
||||
|
||||
// Wait and retry until we establish the connection
|
||||
err = communicator.Retry(retryCtx, func() error {
|
||||
return comm.Connect(o)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer comm.Disconnect()
|
||||
|
||||
// Make sure we always delete the user key from the new node!
|
||||
var once sync.Once
|
||||
cleanupUserKey := func() {
|
||||
o.Output("Cleanup user key...")
|
||||
if err := p.runCommand(o, comm, p.cleanupUserKeyCmd); err != nil {
|
||||
o.Output("WARNING: Failed to cleanup user key on new node: " + err.Error())
|
||||
}
|
||||
}
|
||||
defer once.Do(cleanupUserKey)
|
||||
|
||||
if !p.SkipInstall {
|
||||
if err := p.installChefClient(o, comm); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
o.Output("Creating configuration files...")
|
||||
if err := p.createConfigFiles(o, comm); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !p.SkipRegister {
|
||||
if p.FetchChefCertificates {
|
||||
o.Output("Fetch Chef certificates...")
|
||||
if err := p.fetchChefCertificates(o, comm); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
o.Output("Generate the private key...")
|
||||
if err := p.generateClientKey(o, comm); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if p.Vaults != nil {
|
||||
o.Output("Configure Chef vaults...")
|
||||
if err := p.configureVaults(o, comm); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup the user key before we run Chef-Client to prevent issues
|
||||
// with rights caused by changing settings during the run.
|
||||
once.Do(cleanupUserKey)
|
||||
|
||||
o.Output("Starting initial Chef-Client run...")
|
||||
|
||||
for attempt := 0; attempt <= p.MaxRetries; attempt++ {
|
||||
// We need a new retry context for each attempt, to make sure
|
||||
// they all get the correct timeout.
|
||||
retryCtx, cancel := context.WithTimeout(ctx, comm.Timeout())
|
||||
defer cancel()
|
||||
|
||||
// Make sure to (re)connect before trying to run Chef-Client.
|
||||
if err := communicator.Retry(retryCtx, func() error {
|
||||
return comm.Connect(o)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = p.runChefClient(o, comm)
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Allow RFC062 Exit Codes:
|
||||
// https://github.com/chef/chef-rfc/blob/master/rfc062-exit-status.md
|
||||
exitError, ok := err.(*remote.ExitError)
|
||||
if !ok {
|
||||
return err
|
||||
}
|
||||
|
||||
switch exitError.ExitStatus {
|
||||
case 35:
|
||||
o.Output("Reboot has been scheduled in the run state")
|
||||
err = nil
|
||||
case 37:
|
||||
o.Output("Reboot needs to be completed")
|
||||
err = nil
|
||||
case 213:
|
||||
o.Output("Chef has exited during a client upgrade")
|
||||
err = nil
|
||||
}
|
||||
|
||||
if !p.RetryOnExitCode[exitError.ExitStatus] {
|
||||
return err
|
||||
}
|
||||
|
||||
if attempt < p.MaxRetries {
|
||||
o.Output(fmt.Sprintf("Waiting %s before retrying Chef-Client run...", p.WaitForRetry))
|
||||
time.Sleep(p.WaitForRetry)
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func validateFn(c *terraform.ResourceConfig) (ws []string, es []error) {
|
||||
usePolicyFile := false
|
||||
if usePolicyFileRaw, ok := c.Get("use_policyfile"); ok {
|
||||
switch usePolicyFileRaw := usePolicyFileRaw.(type) {
|
||||
case bool:
|
||||
usePolicyFile = usePolicyFileRaw
|
||||
case string:
|
||||
usePolicyFileBool, err := strconv.ParseBool(usePolicyFileRaw)
|
||||
if err != nil {
|
||||
return ws, append(es, errors.New("\"use_policyfile\" must be a boolean"))
|
||||
}
|
||||
usePolicyFile = usePolicyFileBool
|
||||
default:
|
||||
return ws, append(es, errors.New("\"use_policyfile\" must be a boolean"))
|
||||
}
|
||||
}
|
||||
|
||||
if !usePolicyFile && !c.IsSet("run_list") {
|
||||
es = append(es, errors.New("\"run_list\": required field is not set"))
|
||||
}
|
||||
if usePolicyFile && !c.IsSet("policy_name") {
|
||||
es = append(es, errors.New("using policyfile, but \"policy_name\" not set"))
|
||||
}
|
||||
if usePolicyFile && !c.IsSet("policy_group") {
|
||||
es = append(es, errors.New("using policyfile, but \"policy_group\" not set"))
|
||||
}
|
||||
|
||||
return ws, es
|
||||
}
|
||||
|
||||
func (p *provisioner) deployConfigFiles(o terraform.UIOutput, comm communicator.Communicator, confDir string) error {
|
||||
// Copy the user key to the new instance
|
||||
pk := strings.NewReader(p.UserKey)
|
||||
if err := comm.Upload(path.Join(confDir, p.UserName+".pem"), pk); err != nil {
|
||||
return fmt.Errorf("Uploading user key failed: %v", err)
|
||||
}
|
||||
|
||||
if p.SecretKey != "" {
|
||||
// Copy the secret key to the new instance
|
||||
s := strings.NewReader(p.SecretKey)
|
||||
if err := comm.Upload(path.Join(confDir, secretKey), s); err != nil {
|
||||
return fmt.Errorf("Uploading %s failed: %v", secretKey, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure the SSLVerifyMode value is written as a symbol
|
||||
if p.SSLVerifyMode != "" && !strings.HasPrefix(p.SSLVerifyMode, ":") {
|
||||
p.SSLVerifyMode = fmt.Sprintf(":%s", p.SSLVerifyMode)
|
||||
}
|
||||
|
||||
// Make strings.Join available for use within the template
|
||||
funcMap := template.FuncMap{
|
||||
"join": strings.Join,
|
||||
}
|
||||
|
||||
// Create a new template and parse the client config into it
|
||||
t := template.Must(template.New(clienrb).Funcs(funcMap).Parse(clientConf))
|
||||
|
||||
var buf bytes.Buffer
|
||||
err := t.Execute(&buf, p)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error executing %s template: %s", clienrb, err)
|
||||
}
|
||||
|
||||
// Copy the client config to the new instance
|
||||
if err = comm.Upload(path.Join(confDir, clienrb), &buf); err != nil {
|
||||
return fmt.Errorf("Uploading %s failed: %v", clienrb, err)
|
||||
}
|
||||
|
||||
// Create a map with first boot settings
|
||||
fb := make(map[string]interface{})
|
||||
if p.Attributes != nil {
|
||||
fb = p.Attributes
|
||||
}
|
||||
|
||||
// Check if the run_list was also in the attributes and if so log a warning
|
||||
// that it will be overwritten with the value of the run_list argument.
|
||||
if _, found := fb["run_list"]; found {
|
||||
log.Printf("[WARN] Found a 'run_list' specified in the configured attributes! " +
|
||||
"This value will be overwritten by the value of the `run_list` argument!")
|
||||
}
|
||||
|
||||
// Add the initial runlist to the first boot settings
|
||||
if !p.UsePolicyfile {
|
||||
fb["run_list"] = p.RunList
|
||||
}
|
||||
|
||||
// Marshal the first boot settings to JSON
|
||||
d, err := json.Marshal(fb)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to create %s data: %s", firstBoot, err)
|
||||
}
|
||||
|
||||
// Copy the first-boot.json to the new instance
|
||||
if err := comm.Upload(path.Join(confDir, firstBoot), bytes.NewReader(d)); err != nil {
|
||||
return fmt.Errorf("Uploading %s failed: %v", firstBoot, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *provisioner) deployOhaiHints(o terraform.UIOutput, comm communicator.Communicator, hintDir string) error {
|
||||
for _, hint := range p.OhaiHints {
|
||||
// Open the hint file
|
||||
f, err := os.Open(hint)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Copy the hint to the new instance
|
||||
if err := comm.Upload(path.Join(hintDir, path.Base(hint)), f); err != nil {
|
||||
return fmt.Errorf("Uploading %s failed: %v", path.Base(hint), err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *provisioner) fetchChefCertificatesFunc(
|
||||
knifeCmd string,
|
||||
confDir string) func(terraform.UIOutput, communicator.Communicator) error {
|
||||
return func(o terraform.UIOutput, comm communicator.Communicator) error {
|
||||
clientrb := path.Join(confDir, clienrb)
|
||||
cmd := fmt.Sprintf("%s ssl fetch -c %s", knifeCmd, clientrb)
|
||||
|
||||
return p.runCommand(o, comm, cmd)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *provisioner) generateClientKeyFunc(knifeCmd string, confDir string, noOutput string) provisionFn {
|
||||
return func(o terraform.UIOutput, comm communicator.Communicator) error {
|
||||
options := fmt.Sprintf("-c %s -u %s --key %s",
|
||||
path.Join(confDir, clienrb),
|
||||
p.UserName,
|
||||
path.Join(confDir, p.UserName+".pem"),
|
||||
)
|
||||
|
||||
// See if we already have a node object
|
||||
getNodeCmd := fmt.Sprintf("%s node show %s %s %s", knifeCmd, p.NodeName, options, noOutput)
|
||||
node := p.runCommand(o, comm, getNodeCmd) == nil
|
||||
|
||||
// See if we already have a client object
|
||||
getClientCmd := fmt.Sprintf("%s client show %s %s %s", knifeCmd, p.NodeName, options, noOutput)
|
||||
client := p.runCommand(o, comm, getClientCmd) == nil
|
||||
|
||||
// If we have a client, we can only continue if we are to recreate the client
|
||||
if client && !p.RecreateClient {
|
||||
return fmt.Errorf(
|
||||
"Chef client %q already exists, set recreate_client=true to automatically recreate the client", p.NodeName)
|
||||
}
|
||||
|
||||
// If the node exists, try to delete it
|
||||
if node {
|
||||
deleteNodeCmd := fmt.Sprintf("%s node delete %s -y %s",
|
||||
knifeCmd,
|
||||
p.NodeName,
|
||||
options,
|
||||
)
|
||||
if err := p.runCommand(o, comm, deleteNodeCmd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// If the client exists, try to delete it
|
||||
if client {
|
||||
deleteClientCmd := fmt.Sprintf("%s client delete %s -y %s",
|
||||
knifeCmd,
|
||||
p.NodeName,
|
||||
options,
|
||||
)
|
||||
if err := p.runCommand(o, comm, deleteClientCmd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Create the new client object
|
||||
createClientCmd := fmt.Sprintf("%s client create %s -d -f %s %s",
|
||||
knifeCmd,
|
||||
p.NodeName,
|
||||
path.Join(confDir, "client.pem"),
|
||||
options,
|
||||
)
|
||||
|
||||
return p.runCommand(o, comm, createClientCmd)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *provisioner) configureVaultsFunc(gemCmd string, knifeCmd string, confDir string) provisionFn {
|
||||
return func(o terraform.UIOutput, comm communicator.Communicator) error {
|
||||
if err := p.runCommand(o, comm, fmt.Sprintf("%s install chef-vault", gemCmd)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
options := fmt.Sprintf("-c %s -u %s --key %s",
|
||||
path.Join(confDir, clienrb),
|
||||
p.UserName,
|
||||
path.Join(confDir, p.UserName+".pem"),
|
||||
)
|
||||
|
||||
// If the client gets recreated, remove the (old) client (with old keys) from the vaults/items;
|
||||
// otherwise, the (new) client (with new keys) will not be able to decrypt the vault.
|
||||
if p.RecreateClient {
|
||||
for vault, items := range p.Vaults {
|
||||
for _, item := range items {
|
||||
deleteCmd := fmt.Sprintf("%s vault remove %s %s -C \"%s\" -M client %s",
|
||||
knifeCmd,
|
||||
vault,
|
||||
item,
|
||||
p.NodeName,
|
||||
options,
|
||||
)
|
||||
if err := p.runCommand(o, comm, deleteCmd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for vault, items := range p.Vaults {
|
||||
for _, item := range items {
|
||||
updateCmd := fmt.Sprintf("%s vault update %s %s -C %s -M client %s",
|
||||
knifeCmd,
|
||||
vault,
|
||||
item,
|
||||
p.NodeName,
|
||||
options,
|
||||
)
|
||||
if err := p.runCommand(o, comm, updateCmd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (p *provisioner) runChefClientFunc(chefCmd string, confDir string) provisionFn {
|
||||
return func(o terraform.UIOutput, comm communicator.Communicator) error {
|
||||
fb := path.Join(confDir, firstBoot)
|
||||
var cmd string
|
||||
|
||||
// Policyfiles do not support chef environments, so don't pass the `-E` flag.
|
||||
switch {
|
||||
case p.UsePolicyfile && p.NamedRunList == "":
|
||||
cmd = fmt.Sprintf("%s -j %q", chefCmd, fb)
|
||||
case p.UsePolicyfile && p.NamedRunList != "":
|
||||
cmd = fmt.Sprintf("%s -j %q -n %q", chefCmd, fb, p.NamedRunList)
|
||||
default:
|
||||
cmd = fmt.Sprintf("%s -j %q -E %q", chefCmd, fb, p.Environment)
|
||||
}
|
||||
|
||||
if p.LogToFile {
|
||||
if err := os.MkdirAll(logfileDir, 0755); err != nil {
|
||||
return fmt.Errorf("Error creating logfile directory %s: %v", logfileDir, err)
|
||||
}
|
||||
|
||||
logFile := path.Join(logfileDir, p.NodeName)
|
||||
f, err := os.Create(path.Join(logFile))
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error creating logfile %s: %v", logFile, err)
|
||||
}
|
||||
f.Close()
|
||||
|
||||
o.Output("Writing Chef Client output to " + logFile)
|
||||
o = p
|
||||
}
|
||||
|
||||
return p.runCommand(o, comm, cmd)
|
||||
}
|
||||
}
|
||||
|
||||
// Output implementation of terraform.UIOutput interface
|
||||
func (p *provisioner) Output(output string) {
|
||||
logFile := path.Join(logfileDir, p.NodeName)
|
||||
f, err := os.OpenFile(logFile, os.O_APPEND|os.O_WRONLY, 0666)
|
||||
if err != nil {
|
||||
log.Printf("Error creating logfile %s: %v", logFile, err)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// These steps are needed to remove any ANSI escape codes used to colorize
|
||||
// the output and to make sure we have proper line endings before writing
|
||||
// the string to the logfile.
|
||||
re := regexp.MustCompile(`\x1b\[[0-9;]+m`)
|
||||
output = re.ReplaceAllString(output, "")
|
||||
output = strings.Replace(output, "\r", "\n", -1)
|
||||
|
||||
if _, err := f.WriteString(output); err != nil {
|
||||
log.Printf("Error writing output to logfile %s: %v", logFile, err)
|
||||
}
|
||||
|
||||
if err := f.Sync(); err != nil {
|
||||
log.Printf("Error saving logfile %s to disk: %v", logFile, err)
|
||||
}
|
||||
}
|
||||
|
||||
// runCommand is used to run already prepared commands
|
||||
func (p *provisioner) runCommand(o terraform.UIOutput, comm communicator.Communicator, command string) error {
|
||||
// Unless prevented, prefix the command with sudo
|
||||
if p.useSudo {
|
||||
command = "sudo " + command
|
||||
}
|
||||
|
||||
outR, outW := io.Pipe()
|
||||
errR, errW := io.Pipe()
|
||||
go p.copyOutput(o, outR)
|
||||
go p.copyOutput(o, errR)
|
||||
defer outW.Close()
|
||||
defer errW.Close()
|
||||
|
||||
cmd := &remote.Cmd{
|
||||
Command: command,
|
||||
Stdout: outW,
|
||||
Stderr: errW,
|
||||
}
|
||||
|
||||
err := comm.Start(cmd)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error executing command %q: %v", cmd.Command, err)
|
||||
}
|
||||
|
||||
if err := cmd.Wait(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *provisioner) copyOutput(o terraform.UIOutput, r io.Reader) {
|
||||
lr := linereader.New(r)
|
||||
for line := range lr.Ch {
|
||||
o.Output(line)
|
||||
}
|
||||
}
|
||||
|
||||
func decodeConfig(d *schema.ResourceData) (*provisioner, error) {
|
||||
p := &provisioner{
|
||||
Channel: d.Get("channel").(string),
|
||||
ClientOptions: getStringList(d.Get("client_options")),
|
||||
DisableReporting: d.Get("disable_reporting").(bool),
|
||||
Environment: d.Get("environment").(string),
|
||||
FetchChefCertificates: d.Get("fetch_chef_certificates").(bool),
|
||||
LogToFile: d.Get("log_to_file").(bool),
|
||||
UsePolicyfile: d.Get("use_policyfile").(bool),
|
||||
PolicyGroup: d.Get("policy_group").(string),
|
||||
PolicyName: d.Get("policy_name").(string),
|
||||
HTTPProxy: d.Get("http_proxy").(string),
|
||||
HTTPSProxy: d.Get("https_proxy").(string),
|
||||
NOProxy: getStringList(d.Get("no_proxy")),
|
||||
MaxRetries: d.Get("max_retries").(int),
|
||||
NamedRunList: d.Get("named_run_list").(string),
|
||||
NodeName: d.Get("node_name").(string),
|
||||
OhaiHints: getStringList(d.Get("ohai_hints")),
|
||||
OSType: d.Get("os_type").(string),
|
||||
RecreateClient: d.Get("recreate_client").(bool),
|
||||
PreventSudo: d.Get("prevent_sudo").(bool),
|
||||
RetryOnExitCode: getRetryOnExitCodes(d),
|
||||
RunList: getStringList(d.Get("run_list")),
|
||||
SecretKey: d.Get("secret_key").(string),
|
||||
ServerURL: d.Get("server_url").(string),
|
||||
SkipInstall: d.Get("skip_install").(bool),
|
||||
SkipRegister: d.Get("skip_register").(bool),
|
||||
SSLVerifyMode: d.Get("ssl_verify_mode").(string),
|
||||
UserName: d.Get("user_name").(string),
|
||||
UserKey: d.Get("user_key").(string),
|
||||
Version: d.Get("version").(string),
|
||||
WaitForRetry: time.Duration(d.Get("wait_for_retry").(int)) * time.Second,
|
||||
}
|
||||
|
||||
// Make sure the supplied URL has a trailing slash
|
||||
p.ServerURL = strings.TrimSuffix(p.ServerURL, "/") + "/"
|
||||
|
||||
for i, hint := range p.OhaiHints {
|
||||
hintPath, err := homedir.Expand(hint)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Error expanding the path %s: %v", hint, err)
|
||||
}
|
||||
p.OhaiHints[i] = hintPath
|
||||
}
|
||||
|
||||
if attrs, ok := d.GetOk("attributes_json"); ok {
|
||||
var m map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(attrs.(string)), &m); err != nil {
|
||||
return nil, fmt.Errorf("Error parsing attributes_json: %v", err)
|
||||
}
|
||||
p.Attributes = m
|
||||
}
|
||||
|
||||
if vaults, ok := d.GetOk("vault_json"); ok {
|
||||
var m map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(vaults.(string)), &m); err != nil {
|
||||
return nil, fmt.Errorf("Error parsing vault_json: %v", err)
|
||||
}
|
||||
|
||||
v := make(map[string][]string)
|
||||
for vault, items := range m {
|
||||
switch items := items.(type) {
|
||||
case []interface{}:
|
||||
for _, item := range items {
|
||||
if item, ok := item.(string); ok {
|
||||
v[vault] = append(v[vault], item)
|
||||
}
|
||||
}
|
||||
case interface{}:
|
||||
if item, ok := items.(string); ok {
|
||||
v[vault] = append(v[vault], item)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
p.Vaults = v
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
|
||||
|
||||
func getRetryOnExitCodes(d *schema.ResourceData) map[int]bool {
|
||||
result := make(map[int]bool)
|
||||
|
||||
v, ok := d.GetOk("retry_on_exit_code")
|
||||
if !ok || v == nil {
|
||||
// Use default exit codes
|
||||
result[35] = true
|
||||
result[37] = true
|
||||
result[213] = true
|
||||
return result
|
||||
}
|
||||
|
||||
switch v := v.(type) {
|
||||
case []interface{}:
|
||||
for _, vv := range v {
|
||||
if vv, ok := vv.(int); ok {
|
||||
result[vv] = true
|
||||
}
|
||||
}
|
||||
return result
|
||||
default:
|
||||
panic(fmt.Sprintf("Unsupported type: %T", v))
|
||||
}
|
||||
}
|
||||
|
||||
func getStringList(v interface{}) []string {
|
||||
var result []string
|
||||
|
||||
switch v := v.(type) {
|
||||
case nil:
|
||||
return result
|
||||
case []interface{}:
|
||||
for _, vv := range v {
|
||||
if vv, ok := vv.(string); ok {
|
||||
result = append(result, vv)
|
||||
}
|
||||
}
|
||||
return result
|
||||
default:
|
||||
panic(fmt.Sprintf("Unsupported type: %T", v))
|
||||
}
|
||||
}
|
|
@ -1,435 +0,0 @@
|
|||
package chef

import (
	"fmt"
	"path"
	"testing"

	"github.com/hashicorp/terraform/communicator"
	"github.com/hashicorp/terraform/configs/hcl2shim"
	"github.com/hashicorp/terraform/helper/schema"
	"github.com/hashicorp/terraform/terraform"
)

func TestResourceProvisioner_impl(t *testing.T) {
	var _ terraform.ResourceProvisioner = Provisioner()
}

func TestProvisioner(t *testing.T) {
	if err := Provisioner().(*schema.Provisioner).InternalValidate(); err != nil {
		t.Fatalf("err: %s", err)
	}
}

func TestResourceProvider_Validate_good(t *testing.T) {
	c := testConfig(t, map[string]interface{}{
		"environment": "_default",
		"node_name":   "nodename1",
		"run_list":    []interface{}{"cookbook::recipe"},
		"server_url":  "https://chef.local",
		"user_name":   "bob",
		"user_key":    "USER-KEY",
	})

	warn, errs := Provisioner().Validate(c)
	if len(warn) > 0 {
		t.Fatalf("Warnings: %v", warn)
	}
	if len(errs) > 0 {
		t.Fatalf("Errors: %v", errs)
	}
}

func TestResourceProvider_Validate_bad(t *testing.T) {
	c := testConfig(t, map[string]interface{}{
		"invalid": "nope",
	})

	warn, errs := Provisioner().Validate(c)
	if len(warn) > 0 {
		t.Fatalf("Warnings: %v", warn)
	}
	if len(errs) == 0 {
		t.Fatalf("Should have errors")
	}
}

// Test that the JSON attributes with an unknown value don't
// validate.
func TestResourceProvider_Validate_computedValues(t *testing.T) {
	c := testConfig(t, map[string]interface{}{
		"environment":     "_default",
		"node_name":       "nodename1",
		"run_list":        []interface{}{"cookbook::recipe"},
		"server_url":      "https://chef.local",
		"user_name":       "bob",
		"user_key":        "USER-KEY",
		"attributes_json": hcl2shim.UnknownVariableValue,
	})

	warn, errs := Provisioner().Validate(c)
	if len(warn) > 0 {
		t.Fatalf("Warnings: %v", warn)
	}
	if len(errs) > 0 {
		t.Fatalf("Errors: %v", errs)
	}
}

func TestResourceProvider_runChefClient(t *testing.T) {
	cases := map[string]struct {
		Config   map[string]interface{}
		ChefCmd  string
		ConfDir  string
		Commands map[string]bool
	}{
		"Sudo": {
			Config: map[string]interface{}{
				"node_name":  "nodename1",
				"run_list":   []interface{}{"cookbook::recipe"},
				"server_url": "https://chef.local",
				"user_name":  "bob",
				"user_key":   "USER-KEY",
			},

			ChefCmd: linuxChefCmd,

			ConfDir: linuxConfDir,

			Commands: map[string]bool{
				fmt.Sprintf(`sudo %s -j %q -E "_default"`,
					linuxChefCmd,
					path.Join(linuxConfDir, "first-boot.json")): true,
			},
		},

		"NoSudo": {
			Config: map[string]interface{}{
				"node_name":    "nodename1",
				"prevent_sudo": true,
				"run_list":     []interface{}{"cookbook::recipe"},
				"server_url":   "https://chef.local",
				"user_name":    "bob",
				"user_key":     "USER-KEY",
			},

			ChefCmd: linuxChefCmd,

			ConfDir: linuxConfDir,

			Commands: map[string]bool{
				fmt.Sprintf(`%s -j %q -E "_default"`,
					linuxChefCmd,
					path.Join(linuxConfDir, "first-boot.json")): true,
			},
		},

		"Environment": {
			Config: map[string]interface{}{
				"environment":  "production",
				"node_name":    "nodename1",
				"prevent_sudo": true,
				"run_list":     []interface{}{"cookbook::recipe"},
				"server_url":   "https://chef.local",
				"user_name":    "bob",
				"user_key":     "USER-KEY",
			},

			ChefCmd: windowsChefCmd,

			ConfDir: windowsConfDir,

			Commands: map[string]bool{
				fmt.Sprintf(`%s -j %q -E "production"`,
					windowsChefCmd,
					path.Join(windowsConfDir, "first-boot.json")): true,
			},
		},
	}

	o := new(terraform.MockUIOutput)
	c := new(communicator.MockCommunicator)

	for k, tc := range cases {
		c.Commands = tc.Commands

		p, err := decodeConfig(
			schema.TestResourceDataRaw(t, Provisioner().(*schema.Provisioner).Schema, tc.Config),
		)
		if err != nil {
			t.Fatalf("Error: %v", err)
		}

		p.runChefClient = p.runChefClientFunc(tc.ChefCmd, tc.ConfDir)
		p.useSudo = !p.PreventSudo

		err = p.runChefClient(o, c)
		if err != nil {
			t.Fatalf("Test %q failed: %v", k, err)
		}
	}
}
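
(Aside, not from this changeset.) The test above is table-driven: each case pairs raw provisioner config with the exact shell commands the provisioner is expected to issue, and the mock communicator is driven through its Commands map (c.Commands = tc.Commands) so only expected commands are accepted. A self-contained sketch of the same pattern with stand-in types; fakeRunner and runClient are hypothetical and not part of the chef package:

package main

import "fmt"

// fakeRunner stands in for a communicator: it accepts a command only if
// that command appears in the expected set, the same idea the tests above
// rely on when they populate the mock's Commands map.
type fakeRunner struct {
	expected map[string]bool
}

func (f *fakeRunner) Run(cmd string) error {
	if !f.expected[cmd] {
		return fmt.Errorf("unexpected command: %q", cmd)
	}
	return nil
}

// runClient stands in for the code under test; it issues one command,
// optionally wrapped in sudo.
func runClient(r *fakeRunner, sudo bool) error {
	cmd := "chef-client -j first-boot.json"
	if sudo {
		cmd = "sudo " + cmd
	}
	return r.Run(cmd)
}

func main() {
	cases := map[string]struct {
		sudo     bool
		commands map[string]bool
	}{
		"Sudo":   {sudo: true, commands: map[string]bool{"sudo chef-client -j first-boot.json": true}},
		"NoSudo": {sudo: false, commands: map[string]bool{"chef-client -j first-boot.json": true}},
	}

	for name, tc := range cases {
		r := &fakeRunner{expected: tc.commands}
		if err := runClient(r, tc.sudo); err != nil {
			fmt.Printf("case %q failed: %v\n", name, err)
			continue
		}
		fmt.Printf("case %q ok\n", name)
	}
}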

func TestResourceProvider_fetchChefCertificates(t *testing.T) {
	cases := map[string]struct {
		Config   map[string]interface{}
		KnifeCmd string
		ConfDir  string
		Commands map[string]bool
	}{
		"Sudo": {
			Config: map[string]interface{}{
				"fetch_chef_certificates": true,
				"node_name":               "nodename1",
				"run_list":                []interface{}{"cookbook::recipe"},
				"server_url":              "https://chef.local",
				"user_name":               "bob",
				"user_key":                "USER-KEY",
			},

			KnifeCmd: linuxKnifeCmd,

			ConfDir: linuxConfDir,

			Commands: map[string]bool{
				fmt.Sprintf(`sudo %s ssl fetch -c %s`,
					linuxKnifeCmd,
					path.Join(linuxConfDir, "client.rb")): true,
			},
		},

		"NoSudo": {
			Config: map[string]interface{}{
				"fetch_chef_certificates": true,
				"node_name":               "nodename1",
				"prevent_sudo":            true,
				"run_list":                []interface{}{"cookbook::recipe"},
				"server_url":              "https://chef.local",
				"user_name":               "bob",
				"user_key":                "USER-KEY",
			},

			KnifeCmd: windowsKnifeCmd,

			ConfDir: windowsConfDir,

			Commands: map[string]bool{
				fmt.Sprintf(`%s ssl fetch -c %s`,
					windowsKnifeCmd,
					path.Join(windowsConfDir, "client.rb")): true,
			},
		},
	}

	o := new(terraform.MockUIOutput)
	c := new(communicator.MockCommunicator)

	for k, tc := range cases {
		c.Commands = tc.Commands

		p, err := decodeConfig(
			schema.TestResourceDataRaw(t, Provisioner().(*schema.Provisioner).Schema, tc.Config),
		)
		if err != nil {
			t.Fatalf("Error: %v", err)
		}

		p.fetchChefCertificates = p.fetchChefCertificatesFunc(tc.KnifeCmd, tc.ConfDir)
		p.useSudo = !p.PreventSudo

		err = p.fetchChefCertificates(o, c)
		if err != nil {
			t.Fatalf("Test %q failed: %v", k, err)
		}
	}
}

func TestResourceProvider_configureVaults(t *testing.T) {
	cases := map[string]struct {
		Config   map[string]interface{}
		GemCmd   string
		KnifeCmd string
		ConfDir  string
		Commands map[string]bool
	}{
		"Linux Vault string": {
			Config: map[string]interface{}{
				"node_name":    "nodename1",
				"prevent_sudo": true,
				"run_list":     []interface{}{"cookbook::recipe"},
				"server_url":   "https://chef.local",
				"user_name":    "bob",
				"user_key":     "USER-KEY",
				"vault_json":   `{"vault1": "item1"}`,
			},

			GemCmd:   linuxGemCmd,
			KnifeCmd: linuxKnifeCmd,
			ConfDir:  linuxConfDir,

			Commands: map[string]bool{
				fmt.Sprintf("%s install chef-vault", linuxGemCmd): true,
				fmt.Sprintf("%s vault update vault1 item1 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
			},
		},

		"Linux Vault []string": {
			Config: map[string]interface{}{
				"fetch_chef_certificates": true,
				"node_name":               "nodename1",
				"prevent_sudo":            true,
				"run_list":                []interface{}{"cookbook::recipe"},
				"server_url":              "https://chef.local",
				"user_name":               "bob",
				"user_key":                "USER-KEY",
				"vault_json":              `{"vault1": ["item1", "item2"]}`,
			},

			GemCmd:   linuxGemCmd,
			KnifeCmd: linuxKnifeCmd,
			ConfDir:  linuxConfDir,

			Commands: map[string]bool{
				fmt.Sprintf("%s install chef-vault", linuxGemCmd): true,
				fmt.Sprintf("%s vault update vault1 item1 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
				fmt.Sprintf("%s vault update vault1 item2 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
			},
		},

		"Linux Vault []string (recreate-client for vault)": {
			Config: map[string]interface{}{
				"fetch_chef_certificates": true,
				"node_name":               "nodename1",
				"prevent_sudo":            true,
				"run_list":                []interface{}{"cookbook::recipe"},
				"server_url":              "https://chef.local",
				"user_name":               "bob",
				"user_key":                "USER-KEY",
				"vault_json":              `{"vault1": ["item1", "item2"]}`,
				"recreate_client":         true,
			},

			GemCmd:   linuxGemCmd,
			KnifeCmd: linuxKnifeCmd,
			ConfDir:  linuxConfDir,

			Commands: map[string]bool{
				fmt.Sprintf("%s install chef-vault", linuxGemCmd): true,
				fmt.Sprintf("%s vault remove vault1 item1 -C \"nodename1\" -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
				fmt.Sprintf("%s vault remove vault1 item2 -C \"nodename1\" -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
				fmt.Sprintf("%s vault update vault1 item1 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
				fmt.Sprintf("%s vault update vault1 item2 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", linuxKnifeCmd, linuxConfDir, linuxConfDir): true,
			},
		},

		"Windows Vault string": {
			Config: map[string]interface{}{
				"node_name":    "nodename1",
				"prevent_sudo": true,
				"run_list":     []interface{}{"cookbook::recipe"},
				"server_url":   "https://chef.local",
				"user_name":    "bob",
				"user_key":     "USER-KEY",
				"vault_json":   `{"vault1": "item1"}`,
			},

			GemCmd:   windowsGemCmd,
			KnifeCmd: windowsKnifeCmd,
			ConfDir:  windowsConfDir,

			Commands: map[string]bool{
				fmt.Sprintf("%s install chef-vault", windowsGemCmd): true,
				fmt.Sprintf("%s vault update vault1 item1 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
			},
		},

		"Windows Vault []string": {
			Config: map[string]interface{}{
				"fetch_chef_certificates": true,
				"node_name":               "nodename1",
				"prevent_sudo":            true,
				"run_list":                []interface{}{"cookbook::recipe"},
				"server_url":              "https://chef.local",
				"user_name":               "bob",
				"user_key":                "USER-KEY",
				"vault_json":              `{"vault1": ["item1", "item2"]}`,
			},

			GemCmd:   windowsGemCmd,
			KnifeCmd: windowsKnifeCmd,
			ConfDir:  windowsConfDir,

			Commands: map[string]bool{
				fmt.Sprintf("%s install chef-vault", windowsGemCmd): true,
				fmt.Sprintf("%s vault update vault1 item1 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
				fmt.Sprintf("%s vault update vault1 item2 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
			},
		},

		"Windows Vault [] string (recreate-client for vault)": {
			Config: map[string]interface{}{
				"fetch_chef_certificates": true,
				"node_name":               "nodename1",
				"prevent_sudo":            true,
				"run_list":                []interface{}{"cookbook::recipe"},
				"server_url":              "https://chef.local",
				"user_name":               "bob",
				"user_key":                "USER-KEY",
				"vault_json":              `{"vault1": ["item1", "item2"]}`,
				"recreate_client":         true,
			},

			GemCmd:   windowsGemCmd,
			KnifeCmd: windowsKnifeCmd,
			ConfDir:  windowsConfDir,

			Commands: map[string]bool{
				fmt.Sprintf("%s install chef-vault", windowsGemCmd): true,
				fmt.Sprintf("%s vault remove vault1 item1 -C \"nodename1\" -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
				fmt.Sprintf("%s vault remove vault1 item2 -C \"nodename1\" -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
				fmt.Sprintf("%s vault update vault1 item1 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
				fmt.Sprintf("%s vault update vault1 item2 -C nodename1 -M client -c %s/client.rb "+
					"-u bob --key %s/bob.pem", windowsKnifeCmd, windowsConfDir, windowsConfDir): true,
			},
		},
	}

	o := new(terraform.MockUIOutput)
	c := new(communicator.MockCommunicator)

	for k, tc := range cases {
		c.Commands = tc.Commands

		p, err := decodeConfig(
			schema.TestResourceDataRaw(t, Provisioner().(*schema.Provisioner).Schema, tc.Config),
		)
		if err != nil {
			t.Fatalf("Error: %v", err)
		}

		p.configureVaults = p.configureVaultsFunc(tc.GemCmd, tc.KnifeCmd, tc.ConfDir)
		p.useSudo = !p.PreventSudo

		err = p.configureVaults(o, c)
		if err != nil {
			t.Fatalf("Test %q failed: %v", k, err)
		}
	}
}

func testConfig(t *testing.T, c map[string]interface{}) *terraform.ResourceConfig {
	return terraform.NewResourceConfigRaw(c)
}
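
(Aside, not from this changeset.) In the same style, a decode-level test could check the vault_json normalization directly, without issuing any commands. This is a sketch only, reusing the helpers visible above (decodeConfig, Provisioner, schema.TestResourceDataRaw, the p.Vaults field); no such test existed in the deleted file:

func TestResourceProvider_decodeVaultJSON(t *testing.T) {
	conf := map[string]interface{}{
		"node_name":  "nodename1",
		"run_list":   []interface{}{"cookbook::recipe"},
		"server_url": "https://chef.local",
		"user_name":  "bob",
		"user_key":   "USER-KEY",
		"vault_json": `{"vault1": ["item1", "item2"]}`,
	}

	p, err := decodeConfig(
		schema.TestResourceDataRaw(t, Provisioner().(*schema.Provisioner).Schema, conf),
	)
	if err != nil {
		t.Fatalf("Error: %v", err)
	}

	// The list form of vault_json should decode into both items, in order.
	items := p.Vaults["vault1"]
	if len(items) != 2 || items[0] != "item1" || items[1] != "item2" {
		t.Fatalf("unexpected vault items: %#v", p.Vaults)
	}
}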