From 5d7cb81c0cb9980d8d686f2d1c0a59bbc408acb2 Mon Sep 17 00:00:00 2001 From: Alisdair McDiarmid Date: Wed, 1 Dec 2021 13:10:54 -0500 Subject: [PATCH 01/68] lang: Redact sensitive values from function errors Some function errors include values derived from arguments. This commit is the result of a manual audit of these errors, which resulted in: - Adding a helper function to redact sensitive values; - Applying that helper function where errors include values derived from possibly-sensitive arguments; - Cleaning up other errors which need not include those values, or were otherwise incorrect. --- internal/lang/funcs/collection.go | 4 +- internal/lang/funcs/collection_test.go | 41 ++++++ internal/lang/funcs/encoding.go | 22 +-- internal/lang/funcs/encoding_test.go | 40 +++++ internal/lang/funcs/filesystem.go | 89 ++++++----- internal/lang/funcs/filesystem_test.go | 138 +++++++++++++----- internal/lang/funcs/number.go | 24 +-- internal/lang/funcs/number_test.go | 88 ++++++++--- internal/lang/funcs/redact.go | 20 +++ internal/lang/funcs/redact_test.go | 51 +++++++ .../lang/funcs/testdata/unreadable/foobar | 0 11 files changed, 394 insertions(+), 123 deletions(-) create mode 100644 internal/lang/funcs/redact.go create mode 100644 internal/lang/funcs/redact_test.go create mode 100644 internal/lang/funcs/testdata/unreadable/foobar diff --git a/internal/lang/funcs/collection.go b/internal/lang/funcs/collection.go index f9b3e6ae4..0272b2463 100644 --- a/internal/lang/funcs/collection.go +++ b/internal/lang/funcs/collection.go @@ -311,8 +311,8 @@ var LookupFunc = function.New(&function.Spec{ return defaultVal.WithMarks(markses...), nil } - return cty.UnknownVal(cty.DynamicPseudoType).WithMarks(markses...), fmt.Errorf( - "lookup failed to find '%s'", lookupKey) + return cty.UnknownVal(cty.DynamicPseudoType), fmt.Errorf( + "lookup failed to find key %s", redactIfSensitive(lookupKey, keyMarks)) }, }) diff --git a/internal/lang/funcs/collection_test.go 
b/internal/lang/funcs/collection_test.go index 3ca3f9181..2a1927acf 100644 --- a/internal/lang/funcs/collection_test.go +++ b/internal/lang/funcs/collection_test.go @@ -5,6 +5,7 @@ import ( "math" "testing" + "github.com/hashicorp/terraform/internal/lang/marks" "github.com/zclconf/go-cty/cty" ) @@ -899,6 +900,46 @@ func TestLookup(t *testing.T) { } } +func TestLookup_error(t *testing.T) { + simpleMap := cty.MapVal(map[string]cty.Value{ + "foo": cty.StringVal("bar"), + }) + + tests := map[string]struct { + Values []cty.Value + WantErr string + }{ + "failed to find non-sensitive key": { + []cty.Value{ + simpleMap, + cty.StringVal("boop"), + }, + `lookup failed to find key "boop"`, + }, + "failed to find sensitive key": { + []cty.Value{ + simpleMap, + cty.StringVal("boop").Mark(marks.Sensitive), + }, + "lookup failed to find key (sensitive value)", + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + _, err := Lookup(test.Values...) + + if err == nil { + t.Fatal("succeeded; want error") + } + + if err.Error() != test.WantErr { + t.Errorf("wrong error\ngot: %#v\nwant: %#v", err, test.WantErr) + } + }) + } +} + func TestMatchkeys(t *testing.T) { tests := []struct { Keys cty.Value diff --git a/internal/lang/funcs/encoding.go b/internal/lang/funcs/encoding.go index 27fc2a29c..2e67ebc8b 100644 --- a/internal/lang/funcs/encoding.go +++ b/internal/lang/funcs/encoding.go @@ -18,22 +18,24 @@ import ( var Base64DecodeFunc = function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "str", - Type: cty.String, + Name: "str", + Type: cty.String, + AllowMarked: true, }, }, Type: function.StaticReturnType(cty.String), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - s := args[0].AsString() + str, strMarks := args[0].Unmark() + s := str.AsString() sDec, err := base64.StdEncoding.DecodeString(s) if err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data '%s'", s) + return 
cty.UnknownVal(cty.String), fmt.Errorf("failed to decode base64 data %s", redactIfSensitive(s, strMarks)) } if !utf8.Valid([]byte(sDec)) { - log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", sDec) + log.Printf("[DEBUG] the result of decoding the provided string is not valid UTF-8: %s", redactIfSensitive(sDec, strMarks)) return cty.UnknownVal(cty.String), fmt.Errorf("the result of decoding the provided string is not valid UTF-8") } - return cty.StringVal(string(sDec)), nil + return cty.StringVal(string(sDec)).WithMarks(strMarks), nil }, }) @@ -125,7 +127,7 @@ var TextDecodeBase64Func = function.New(&function.Spec{ case base64.CorruptInputError: return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "the given value is has an invalid base64 symbol at offset %d", int(err)) default: - return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid source string: %T", err) + return cty.UnknownVal(cty.String), function.NewArgErrorf(0, "invalid source string: %w", err) } } @@ -156,13 +158,13 @@ var Base64GzipFunc = function.New(&function.Spec{ var b bytes.Buffer gz := gzip.NewWriter(&b) if _, err := gz.Write([]byte(s)); err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to write gzip raw data: '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to write gzip raw data: %w", err) } if err := gz.Flush(); err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to flush gzip writer: '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to flush gzip writer: %w", err) } if err := gz.Close(); err != nil { - return cty.UnknownVal(cty.String), fmt.Errorf("failed to close gzip writer: '%s'", s) + return cty.UnknownVal(cty.String), fmt.Errorf("failed to close gzip writer: %w", err) } return cty.StringVal(base64.StdEncoding.EncodeToString(b.Bytes())), nil }, diff --git a/internal/lang/funcs/encoding_test.go b/internal/lang/funcs/encoding_test.go index 2aa45a374..2e05784e8 100644 
--- a/internal/lang/funcs/encoding_test.go +++ b/internal/lang/funcs/encoding_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/hashicorp/terraform/internal/lang/marks" "github.com/zclconf/go-cty/cty" ) @@ -18,6 +19,11 @@ func TestBase64Decode(t *testing.T) { cty.StringVal("abc123!?$*&()'-=@~"), false, }, + { + cty.StringVal("YWJjMTIzIT8kKiYoKSctPUB+").Mark(marks.Sensitive), + cty.StringVal("abc123!?$*&()'-=@~").Mark(marks.Sensitive), + false, + }, { // Invalid base64 data decoding cty.StringVal("this-is-an-invalid-base64-data"), cty.UnknownVal(cty.String), @@ -50,6 +56,40 @@ func TestBase64Decode(t *testing.T) { } } +func TestBase64Decode_error(t *testing.T) { + tests := map[string]struct { + String cty.Value + WantErr string + }{ + "invalid base64": { + cty.StringVal("dfg"), + `failed to decode base64 data "dfg"`, + }, + "sensitive invalid base64": { + cty.StringVal("dfg").Mark(marks.Sensitive), + `failed to decode base64 data (sensitive value)`, + }, + "invalid utf-8": { + cty.StringVal("whee"), + "the result of decoding the provided string is not valid UTF-8", + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + _, err := Base64Decode(test.String) + + if err == nil { + t.Fatal("succeeded; want error") + } + + if err.Error() != test.WantErr { + t.Errorf("wrong error result\ngot: %#v\nwant: %#v", err.Error(), test.WantErr) + } + }) + } +} + func TestBase64Encode(t *testing.T) { tests := []struct { String cty.Value diff --git a/internal/lang/funcs/filesystem.go b/internal/lang/funcs/filesystem.go index 846b86110..01e090a5b 100644 --- a/internal/lang/funcs/filesystem.go +++ b/internal/lang/funcs/filesystem.go @@ -23,14 +23,16 @@ func MakeFileFunc(baseDir string, encBase64 bool) function.Function { return function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "path", - Type: cty.String, + Name: "path", + Type: cty.String, + AllowMarked: true, }, }, Type: function.StaticReturnType(cty.String), Impl: func(args 
[]cty.Value, retType cty.Type) (cty.Value, error) { - path := args[0].AsString() - src, err := readFileBytes(baseDir, path) + pathArg, pathMarks := args[0].Unmark() + path := pathArg.AsString() + src, err := readFileBytes(baseDir, path, pathMarks) if err != nil { err = function.NewArgError(0, err) return cty.UnknownVal(cty.String), err @@ -39,12 +41,12 @@ func MakeFileFunc(baseDir string, encBase64 bool) function.Function { switch { case encBase64: enc := base64.StdEncoding.EncodeToString(src) - return cty.StringVal(enc), nil + return cty.StringVal(enc).WithMarks(pathMarks), nil default: if !utf8.Valid(src) { - return cty.UnknownVal(cty.String), fmt.Errorf("contents of %s are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. filemd5, filesha256) to obtain file hashing results instead", path) + return cty.UnknownVal(cty.String), fmt.Errorf("contents of %s are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. filemd5, filesha256) to obtain file hashing results instead", redactIfSensitive(path, pathMarks)) } - return cty.StringVal(string(src)), nil + return cty.StringVal(string(src)).WithMarks(pathMarks), nil } }, }) @@ -67,8 +69,9 @@ func MakeTemplateFileFunc(baseDir string, funcsCb func() map[string]function.Fun params := []function.Parameter{ { - Name: "path", - Type: cty.String, + Name: "path", + Type: cty.String, + AllowMarked: true, }, { Name: "vars", @@ -76,10 +79,10 @@ func MakeTemplateFileFunc(baseDir string, funcsCb func() map[string]function.Fun }, } - loadTmpl := func(fn string) (hcl.Expression, error) { + loadTmpl := func(fn string, marks cty.ValueMarks) (hcl.Expression, error) { // We re-use File here to ensure the same filename interpretation // as it does, along with its other safety checks. 
- tmplVal, err := File(baseDir, cty.StringVal(fn)) + tmplVal, err := File(baseDir, cty.StringVal(fn).WithMarks(marks)) if err != nil { return nil, err } @@ -159,7 +162,9 @@ func MakeTemplateFileFunc(baseDir string, funcsCb func() map[string]function.Fun // We'll render our template now to see what result type it produces. // A template consisting only of a single interpolation an potentially // return any type. - expr, err := loadTmpl(args[0].AsString()) + + pathArg, pathMarks := args[0].Unmark() + expr, err := loadTmpl(pathArg.AsString(), pathMarks) if err != nil { return cty.DynamicPseudoType, err } @@ -170,11 +175,13 @@ func MakeTemplateFileFunc(baseDir string, funcsCb func() map[string]function.Fun return val.Type(), err }, Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - expr, err := loadTmpl(args[0].AsString()) + pathArg, pathMarks := args[0].Unmark() + expr, err := loadTmpl(pathArg.AsString(), pathMarks) if err != nil { return cty.DynamicVal, err } - return renderTmpl(expr, args[1]) + result, err := renderTmpl(expr, args[1]) + return result.WithMarks(pathMarks), err }, }) @@ -186,16 +193,18 @@ func MakeFileExistsFunc(baseDir string) function.Function { return function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "path", - Type: cty.String, + Name: "path", + Type: cty.String, + AllowMarked: true, }, }, Type: function.StaticReturnType(cty.Bool), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - path := args[0].AsString() + pathArg, pathMarks := args[0].Unmark() + path := pathArg.AsString() path, err := homedir.Expand(path) if err != nil { - return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to expand ~: %s", err) + return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to expand ~: %w", err) } if !filepath.IsAbs(path) { @@ -208,17 +217,17 @@ func MakeFileExistsFunc(baseDir string) function.Function { fi, err := os.Stat(path) if err != nil { if os.IsNotExist(err) { - return cty.False, nil + return 
cty.False.WithMarks(pathMarks), nil } - return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to stat %s", path) + return cty.UnknownVal(cty.Bool), fmt.Errorf("failed to stat %s", redactIfSensitive(path, pathMarks)) } if fi.Mode().IsRegular() { - return cty.True, nil + return cty.True.WithMarks(pathMarks), nil } return cty.False, fmt.Errorf("%s is not a regular file, but %q", - path, fi.Mode().String()) + redactIfSensitive(path, pathMarks), fi.Mode().String()) }, }) } @@ -229,18 +238,24 @@ func MakeFileSetFunc(baseDir string) function.Function { return function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "path", - Type: cty.String, + Name: "path", + Type: cty.String, + AllowMarked: true, }, { - Name: "pattern", - Type: cty.String, + Name: "pattern", + Type: cty.String, + AllowMarked: true, }, }, Type: function.StaticReturnType(cty.Set(cty.String)), Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { - path := args[0].AsString() - pattern := args[1].AsString() + pathArg, pathMarks := args[0].Unmark() + path := pathArg.AsString() + patternArg, patternMarks := args[1].Unmark() + pattern := patternArg.AsString() + + marks := []cty.ValueMarks{pathMarks, patternMarks} if !filepath.IsAbs(path) { path = filepath.Join(baseDir, path) @@ -253,7 +268,7 @@ func MakeFileSetFunc(baseDir string) function.Function { matches, err := doublestar.Glob(pattern) if err != nil { - return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to glob pattern (%s): %s", pattern, err) + return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to glob pattern %s: %w", redactIfSensitive(pattern, marks...), err) } var matchVals []cty.Value @@ -261,7 +276,7 @@ func MakeFileSetFunc(baseDir string) function.Function { fi, err := os.Stat(match) if err != nil { - return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to stat (%s): %s", match, err) + return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to stat %s: %w", redactIfSensitive(match, 
marks...), err) } if !fi.Mode().IsRegular() { @@ -272,7 +287,7 @@ func MakeFileSetFunc(baseDir string) function.Function { match, err = filepath.Rel(path, match) if err != nil { - return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to trim path of match (%s): %s", match, err) + return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to trim path of match %s: %w", redactIfSensitive(match, marks...), err) } // Replace any remaining file separators with forward slash (/) @@ -283,10 +298,10 @@ func MakeFileSetFunc(baseDir string) function.Function { } if len(matchVals) == 0 { - return cty.SetValEmpty(cty.String), nil + return cty.SetValEmpty(cty.String).WithMarks(marks...), nil } - return cty.SetVal(matchVals), nil + return cty.SetVal(matchVals).WithMarks(marks...), nil }, }) } @@ -355,7 +370,7 @@ var PathExpandFunc = function.New(&function.Spec{ func openFile(baseDir, path string) (*os.File, error) { path, err := homedir.Expand(path) if err != nil { - return nil, fmt.Errorf("failed to expand ~: %s", err) + return nil, fmt.Errorf("failed to expand ~: %w", err) } if !filepath.IsAbs(path) { @@ -368,12 +383,12 @@ func openFile(baseDir, path string) (*os.File, error) { return os.Open(path) } -func readFileBytes(baseDir, path string) ([]byte, error) { +func readFileBytes(baseDir, path string, marks cty.ValueMarks) ([]byte, error) { f, err := openFile(baseDir, path) if err != nil { if os.IsNotExist(err) { // An extra Terraform-specific hint for this situation - return nil, fmt.Errorf("no file exists at %s; this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource", path) + return nil, fmt.Errorf("no file exists at %s; this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this 
configuration you must instead obtain this result from an attribute of that resource", redactIfSensitive(path, marks)) } return nil, err } @@ -381,7 +396,7 @@ func readFileBytes(baseDir, path string) ([]byte, error) { src, err := ioutil.ReadAll(f) if err != nil { - return nil, fmt.Errorf("failed to read %s", path) + return nil, fmt.Errorf("failed to read file: %w", err) } return src, nil diff --git a/internal/lang/funcs/filesystem_test.go b/internal/lang/funcs/filesystem_test.go index b91b52b1e..4e673984b 100644 --- a/internal/lang/funcs/filesystem_test.go +++ b/internal/lang/funcs/filesystem_test.go @@ -2,9 +2,11 @@ package funcs import ( "fmt" + "os" "path/filepath" "testing" + "github.com/hashicorp/terraform/internal/lang/marks" homedir "github.com/mitchellh/go-homedir" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/function" @@ -15,22 +17,32 @@ func TestFile(t *testing.T) { tests := []struct { Path cty.Value Want cty.Value - Err bool + Err string }{ { cty.StringVal("testdata/hello.txt"), cty.StringVal("Hello World"), - false, + ``, }, { cty.StringVal("testdata/icon.png"), cty.NilVal, - true, // Not valid UTF-8 + `contents of "testdata/icon.png" are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. filemd5, filesha256) to obtain file hashing results instead`, + }, + { + cty.StringVal("testdata/icon.png").Mark(marks.Sensitive), + cty.NilVal, + `contents of (sensitive value) are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. 
filemd5, filesha256) to obtain file hashing results instead`, }, { cty.StringVal("testdata/missing"), cty.NilVal, - true, // no file exists + `no file exists at "testdata/missing"; this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource`, + }, + { + cty.StringVal("testdata/missing").Mark(marks.Sensitive), + cty.NilVal, + `no file exists at (sensitive value); this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource`, }, } @@ -38,10 +50,13 @@ func TestFile(t *testing.T) { t.Run(fmt.Sprintf("File(\".\", %#v)", test.Path), func(t *testing.T) { got, err := File(".", test.Path) - if test.Err { + if test.Err != "" { if err == nil { t.Fatal("succeeded; want error") } + if got, want := err.Error(), test.Err; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } return } else if err != nil { t.Fatalf("unexpected error: %s", err) @@ -71,13 +86,19 @@ func TestTemplateFile(t *testing.T) { cty.StringVal("testdata/icon.png"), cty.EmptyObjectVal, cty.NilVal, - `contents of testdata/icon.png are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. filemd5, filesha256) to obtain file hashing results instead`, + `contents of "testdata/icon.png" are not valid UTF-8; use the filebase64 function to obtain the Base64 encoded contents or the other file functions (e.g. 
filemd5, filesha256) to obtain file hashing results instead`, }, { cty.StringVal("testdata/missing"), cty.EmptyObjectVal, cty.NilVal, - `no file exists at testdata/missing; this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource`, + `no file exists at "testdata/missing"; this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource`, + }, + { + cty.StringVal("testdata/secrets.txt").Mark(marks.Sensitive), + cty.EmptyObjectVal, + cty.NilVal, + `no file exists at (sensitive value); this function works only with files that are distributed as part of the configuration source code, so if this file will be created by a resource in this configuration you must instead obtain this result from an attribute of that resource`, }, { cty.StringVal("testdata/hello.tmpl"), @@ -197,33 +218,61 @@ func TestFileExists(t *testing.T) { tests := []struct { Path cty.Value Want cty.Value - Err bool + Err string }{ { cty.StringVal("testdata/hello.txt"), cty.BoolVal(true), - false, + ``, }, { - cty.StringVal(""), // empty path + cty.StringVal(""), cty.BoolVal(false), - true, + `"." 
is not a regular file, but "drwxr-xr-x"`, + }, + { + cty.StringVal("testdata").Mark(marks.Sensitive), + cty.BoolVal(false), + `(sensitive value) is not a regular file, but "drwxr-xr-x"`, }, { cty.StringVal("testdata/missing"), cty.BoolVal(false), - false, // no file exists + ``, + }, + { + cty.StringVal("testdata/unreadable/foobar"), + cty.BoolVal(false), + `failed to stat "testdata/unreadable/foobar"`, + }, + { + cty.StringVal("testdata/unreadable/foobar").Mark(marks.Sensitive), + cty.BoolVal(false), + `failed to stat (sensitive value)`, }, } + // Ensure "unreadable" directory cannot be listed during the test run + fi, err := os.Lstat("testdata/unreadable") + if err != nil { + t.Fatal(err) + } + os.Chmod("testdata/unreadable", 0000) + defer func(mode os.FileMode) { + os.Chmod("testdata/unreadable", mode) + }(fi.Mode()) + for _, test := range tests { t.Run(fmt.Sprintf("FileExists(\".\", %#v)", test.Path), func(t *testing.T) { got, err := FileExists(".", test.Path) - if test.Err { + if test.Err != "" { if err == nil { t.Fatal("succeeded; want error") } + if got, want := err.Error(), test.Err; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } return } else if err != nil { t.Fatalf("unexpected error: %s", err) @@ -241,49 +290,49 @@ func TestFileSet(t *testing.T) { Path cty.Value Pattern cty.Value Want cty.Value - Err bool + Err string }{ { cty.StringVal("."), cty.StringVal("testdata*"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("."), cty.StringVal("testdata"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("."), cty.StringVal("{testdata,missing}"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("."), cty.StringVal("testdata/missing"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("."), cty.StringVal("testdata/missing*"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("."), cty.StringVal("*/missing"), cty.SetValEmpty(cty.String), - false, + ``, }, { 
cty.StringVal("."), cty.StringVal("**/missing"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("."), @@ -291,7 +340,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -299,7 +348,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -307,7 +356,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -316,7 +365,7 @@ func TestFileSet(t *testing.T) { cty.StringVal("testdata/hello.tmpl"), cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -325,7 +374,7 @@ func TestFileSet(t *testing.T) { cty.StringVal("testdata/hello.tmpl"), cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -333,7 +382,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -341,7 +390,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -350,7 +399,7 @@ func TestFileSet(t *testing.T) { cty.StringVal("testdata/hello.tmpl"), cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -359,7 +408,7 @@ func TestFileSet(t *testing.T) { cty.StringVal("testdata/hello.tmpl"), cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), @@ -368,31 +417,37 @@ func TestFileSet(t *testing.T) { cty.StringVal("testdata/hello.tmpl"), cty.StringVal("testdata/hello.txt"), }), - false, + ``, }, { cty.StringVal("."), cty.StringVal("["), cty.SetValEmpty(cty.String), - true, + `failed to glob pattern "[": syntax error in pattern`, + }, + { + cty.StringVal("."), + cty.StringVal("[").Mark(marks.Sensitive), + cty.SetValEmpty(cty.String), + `failed 
to glob pattern (sensitive value): syntax error in pattern`, }, { cty.StringVal("."), cty.StringVal("\\"), cty.SetValEmpty(cty.String), - true, + `failed to glob pattern "\\": syntax error in pattern`, }, { cty.StringVal("testdata"), cty.StringVal("missing"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("testdata"), cty.StringVal("missing*"), cty.SetValEmpty(cty.String), - false, + ``, }, { cty.StringVal("testdata"), @@ -400,7 +455,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("hello.txt"), }), - false, + ``, }, { cty.StringVal("testdata"), @@ -408,7 +463,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("hello.txt"), }), - false, + ``, }, { cty.StringVal("testdata"), @@ -416,7 +471,7 @@ func TestFileSet(t *testing.T) { cty.SetVal([]cty.Value{ cty.StringVal("hello.txt"), }), - false, + ``, }, { cty.StringVal("testdata"), @@ -425,7 +480,7 @@ func TestFileSet(t *testing.T) { cty.StringVal("hello.tmpl"), cty.StringVal("hello.txt"), }), - false, + ``, }, } @@ -433,10 +488,13 @@ func TestFileSet(t *testing.T) { t.Run(fmt.Sprintf("FileSet(\".\", %#v, %#v)", test.Path, test.Pattern), func(t *testing.T) { got, err := FileSet(".", test.Path, test.Pattern) - if test.Err { + if test.Err != "" { if err == nil { t.Fatal("succeeded; want error") } + if got, want := err.Error(), test.Err; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } return } else if err != nil { t.Fatalf("unexpected error: %s", err) diff --git a/internal/lang/funcs/number.go b/internal/lang/funcs/number.go index 43effec12..d95870610 100644 --- a/internal/lang/funcs/number.go +++ b/internal/lang/funcs/number.go @@ -95,12 +95,14 @@ var SignumFunc = function.New(&function.Spec{ var ParseIntFunc = function.New(&function.Spec{ Params: []function.Parameter{ { - Name: "number", - Type: cty.DynamicPseudoType, + Name: "number", + Type: cty.DynamicPseudoType, + AllowMarked: true, }, { - Name: "base", - Type: cty.Number, + 
Name: "base", + Type: cty.Number, + AllowMarked: true, }, }, @@ -116,11 +118,13 @@ var ParseIntFunc = function.New(&function.Spec{ var base int var err error - if err = gocty.FromCtyValue(args[0], &numstr); err != nil { + numArg, numMarks := args[0].Unmark() + if err = gocty.FromCtyValue(numArg, &numstr); err != nil { return cty.UnknownVal(cty.String), function.NewArgError(0, err) } - if err = gocty.FromCtyValue(args[1], &base); err != nil { + baseArg, baseMarks := args[1].Unmark() + if err = gocty.FromCtyValue(baseArg, &base); err != nil { return cty.UnknownVal(cty.Number), function.NewArgError(1, err) } @@ -135,13 +139,13 @@ var ParseIntFunc = function.New(&function.Spec{ if !ok { return cty.UnknownVal(cty.Number), function.NewArgErrorf( 0, - "cannot parse %q as a base %d integer", - numstr, - base, + "cannot parse %s as a base %s integer", + redactIfSensitive(numstr, numMarks), + redactIfSensitive(base, baseMarks), ) } - parsedNum := cty.NumberVal((&big.Float{}).SetInt(num)) + parsedNum := cty.NumberVal((&big.Float{}).SetInt(num)).WithMarks(numMarks, baseMarks) return parsedNum, nil }, diff --git a/internal/lang/funcs/number_test.go b/internal/lang/funcs/number_test.go index b467a429f..260e0127c 100644 --- a/internal/lang/funcs/number_test.go +++ b/internal/lang/funcs/number_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/hashicorp/terraform/internal/lang/marks" "github.com/zclconf/go-cty/cty" ) @@ -187,139 +188,175 @@ func TestParseInt(t *testing.T) { Num cty.Value Base cty.Value Want cty.Value - Err bool + Err string }{ { cty.StringVal("128"), cty.NumberIntVal(10), cty.NumberIntVal(128), - false, + ``, + }, + { + cty.StringVal("128").Mark(marks.Sensitive), + cty.NumberIntVal(10), + cty.NumberIntVal(128).Mark(marks.Sensitive), + ``, + }, + { + cty.StringVal("128"), + cty.NumberIntVal(10).Mark(marks.Sensitive), + cty.NumberIntVal(128).Mark(marks.Sensitive), + ``, + }, + { + cty.StringVal("128").Mark(marks.Sensitive), + 
cty.NumberIntVal(10).Mark(marks.Sensitive), + cty.NumberIntVal(128).Mark(marks.Sensitive), + ``, + }, + { + cty.StringVal("128").Mark(marks.Raw), + cty.NumberIntVal(10).Mark(marks.Sensitive), + cty.NumberIntVal(128).WithMarks(cty.NewValueMarks(marks.Raw, marks.Sensitive)), + ``, }, { cty.StringVal("-128"), cty.NumberIntVal(10), cty.NumberIntVal(-128), - false, + ``, }, { cty.StringVal("00128"), cty.NumberIntVal(10), cty.NumberIntVal(128), - false, + ``, }, { cty.StringVal("-00128"), cty.NumberIntVal(10), cty.NumberIntVal(-128), - false, + ``, }, { cty.StringVal("FF00"), cty.NumberIntVal(16), cty.NumberIntVal(65280), - false, + ``, }, { cty.StringVal("ff00"), cty.NumberIntVal(16), cty.NumberIntVal(65280), - false, + ``, }, { cty.StringVal("-FF00"), cty.NumberIntVal(16), cty.NumberIntVal(-65280), - false, + ``, }, { cty.StringVal("00FF00"), cty.NumberIntVal(16), cty.NumberIntVal(65280), - false, + ``, }, { cty.StringVal("-00FF00"), cty.NumberIntVal(16), cty.NumberIntVal(-65280), - false, + ``, }, { cty.StringVal("1011111011101111"), cty.NumberIntVal(2), cty.NumberIntVal(48879), - false, + ``, }, { cty.StringVal("aA"), cty.NumberIntVal(62), cty.NumberIntVal(656), - false, + ``, }, { cty.StringVal("Aa"), cty.NumberIntVal(62), cty.NumberIntVal(2242), - false, + ``, }, { cty.StringVal("999999999999999999999999999999999999999999999999999999999999"), cty.NumberIntVal(10), cty.MustParseNumberVal("999999999999999999999999999999999999999999999999999999999999"), - false, + ``, }, { cty.StringVal("FF"), cty.NumberIntVal(10), cty.UnknownVal(cty.Number), - true, + `cannot parse "FF" as a base 10 integer`, + }, + { + cty.StringVal("FF").Mark(marks.Sensitive), + cty.NumberIntVal(10), + cty.UnknownVal(cty.Number), + `cannot parse (sensitive value) as a base 10 integer`, + }, + { + cty.StringVal("FF").Mark(marks.Sensitive), + cty.NumberIntVal(10).Mark(marks.Sensitive), + cty.UnknownVal(cty.Number), + `cannot parse (sensitive value) as a base (sensitive value) integer`, }, { 
cty.StringVal("00FF"), cty.NumberIntVal(10), cty.UnknownVal(cty.Number), - true, + `cannot parse "00FF" as a base 10 integer`, }, { cty.StringVal("-00FF"), cty.NumberIntVal(10), cty.UnknownVal(cty.Number), - true, + `cannot parse "-00FF" as a base 10 integer`, }, { cty.NumberIntVal(2), cty.NumberIntVal(10), cty.UnknownVal(cty.Number), - true, + `first argument must be a string, not number`, }, { cty.StringVal("1"), cty.NumberIntVal(63), cty.UnknownVal(cty.Number), - true, + `base must be a whole number between 2 and 62 inclusive`, }, { cty.StringVal("1"), cty.NumberIntVal(-1), cty.UnknownVal(cty.Number), - true, + `base must be a whole number between 2 and 62 inclusive`, }, { cty.StringVal("1"), cty.NumberIntVal(1), cty.UnknownVal(cty.Number), - true, + `base must be a whole number between 2 and 62 inclusive`, }, { cty.StringVal("1"), cty.NumberIntVal(0), cty.UnknownVal(cty.Number), - true, + `base must be a whole number between 2 and 62 inclusive`, }, { cty.StringVal("1.2"), cty.NumberIntVal(10), cty.UnknownVal(cty.Number), - true, + `cannot parse "1.2" as a base 10 integer`, }, } @@ -327,10 +364,13 @@ func TestParseInt(t *testing.T) { t.Run(fmt.Sprintf("parseint(%#v, %#v)", test.Num, test.Base), func(t *testing.T) { got, err := ParseInt(test.Num, test.Base) - if test.Err { + if test.Err != "" { if err == nil { t.Fatal("succeeded; want error") } + if got, want := err.Error(), test.Err; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } return } else if err != nil { t.Fatalf("unexpected error: %s", err) diff --git a/internal/lang/funcs/redact.go b/internal/lang/funcs/redact.go new file mode 100644 index 000000000..bbec3f0a1 --- /dev/null +++ b/internal/lang/funcs/redact.go @@ -0,0 +1,20 @@ +package funcs + +import ( + "fmt" + + "github.com/hashicorp/terraform/internal/lang/marks" + "github.com/zclconf/go-cty/cty" +) + +func redactIfSensitive(value interface{}, markses ...cty.ValueMarks) string { + if 
marks.Has(cty.DynamicVal.WithMarks(markses...), marks.Sensitive) { + return "(sensitive value)" + } + switch v := value.(type) { + case string: + return fmt.Sprintf("%q", v) + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/internal/lang/funcs/redact_test.go b/internal/lang/funcs/redact_test.go new file mode 100644 index 000000000..b45721fb9 --- /dev/null +++ b/internal/lang/funcs/redact_test.go @@ -0,0 +1,51 @@ +package funcs + +import ( + "testing" + + "github.com/hashicorp/terraform/internal/lang/marks" + "github.com/zclconf/go-cty/cty" +) + +func TestRedactIfSensitive(t *testing.T) { + testCases := map[string]struct { + value interface{} + marks []cty.ValueMarks + want string + }{ + "sensitive string": { + value: "foo", + marks: []cty.ValueMarks{cty.NewValueMarks(marks.Sensitive)}, + want: "(sensitive value)", + }, + "raw non-sensitive string": { + value: "foo", + marks: []cty.ValueMarks{cty.NewValueMarks(marks.Raw)}, + want: `"foo"`, + }, + "raw sensitive string": { + value: "foo", + marks: []cty.ValueMarks{cty.NewValueMarks(marks.Raw), cty.NewValueMarks(marks.Sensitive)}, + want: "(sensitive value)", + }, + "sensitive number": { + value: 12345, + marks: []cty.ValueMarks{cty.NewValueMarks(marks.Sensitive)}, + want: "(sensitive value)", + }, + "non-sensitive number": { + value: 12345, + marks: []cty.ValueMarks{}, + want: "12345", + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + got := redactIfSensitive(tc.value, tc.marks...) 
+ if got != tc.want { + t.Errorf("wrong result, got %v, want %v", got, tc.want) + } + }) + } +} diff --git a/internal/lang/funcs/testdata/unreadable/foobar b/internal/lang/funcs/testdata/unreadable/foobar new file mode 100644 index 000000000..e69de29bb From 0771a214d8e19513bdaac338b7b28c8b50eaa202 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Mon, 6 Dec 2021 14:28:34 -0500 Subject: [PATCH 02/68] skip provider resolution when there are errors If there are errors loading the configuration, don't try to resolve the provider names, as those names may not even be valid. --- internal/configs/config_build.go | 10 +++++++--- .../config-diagnostics/invalid-provider/errors | 1 + .../config-diagnostics/invalid-provider/main.tf | 3 +++ .../config-diagnostics/invalid-provider/mod/main.tf | 2 ++ .../config-diagnostics/invalid-provider/warnings | 1 + 5 files changed, 14 insertions(+), 3 deletions(-) create mode 100644 internal/configs/testdata/config-diagnostics/invalid-provider/errors create mode 100644 internal/configs/testdata/config-diagnostics/invalid-provider/main.tf create mode 100644 internal/configs/testdata/config-diagnostics/invalid-provider/mod/main.tf create mode 100644 internal/configs/testdata/config-diagnostics/invalid-provider/warnings diff --git a/internal/configs/config_build.go b/internal/configs/config_build.go index e7ebe5021..96dcbed25 100644 --- a/internal/configs/config_build.go +++ b/internal/configs/config_build.go @@ -23,9 +23,13 @@ func BuildConfig(root *Module, walker ModuleWalker) (*Config, hcl.Diagnostics) { cfg.Root = cfg // Root module is self-referential. cfg.Children, diags = buildChildModules(cfg, walker) - // Now that the config is built, we can connect the provider names to all - // the known types for validation. - cfg.resolveProviderTypes() + // Skip provider resolution if there are any errors, since the provider + // configurations themselves may not be valid. 
+ if !diags.HasErrors() { + // Now that the config is built, we can connect the provider names to all + // the known types for validation. + cfg.resolveProviderTypes() + } diags = append(diags, validateProviderConfigs(nil, cfg, false)...) diff --git a/internal/configs/testdata/config-diagnostics/invalid-provider/errors b/internal/configs/testdata/config-diagnostics/invalid-provider/errors new file mode 100644 index 000000000..359d47600 --- /dev/null +++ b/internal/configs/testdata/config-diagnostics/invalid-provider/errors @@ -0,0 +1 @@ +main.tf:1,1-20: Invalid provider local name; crash_es is an invalid provider local name diff --git a/internal/configs/testdata/config-diagnostics/invalid-provider/main.tf b/internal/configs/testdata/config-diagnostics/invalid-provider/main.tf new file mode 100644 index 000000000..ba8468469 --- /dev/null +++ b/internal/configs/testdata/config-diagnostics/invalid-provider/main.tf @@ -0,0 +1,3 @@ +module "mod" { + source = "./mod" +} diff --git a/internal/configs/testdata/config-diagnostics/invalid-provider/mod/main.tf b/internal/configs/testdata/config-diagnostics/invalid-provider/mod/main.tf new file mode 100644 index 000000000..f50ced1fe --- /dev/null +++ b/internal/configs/testdata/config-diagnostics/invalid-provider/mod/main.tf @@ -0,0 +1,2 @@ +provider "crash_es" { +} diff --git a/internal/configs/testdata/config-diagnostics/invalid-provider/warnings b/internal/configs/testdata/config-diagnostics/invalid-provider/warnings new file mode 100644 index 000000000..8bdc68eeb --- /dev/null +++ b/internal/configs/testdata/config-diagnostics/invalid-provider/warnings @@ -0,0 +1 @@ +Empty provider configuration blocks are not required From c647b41d658acb943d3538d0e90f141ffca8ec12 Mon Sep 17 00:00:00 2001 From: Barrett Clark Date: Fri, 19 Nov 2021 15:03:38 -0600 Subject: [PATCH 03/68] Add parallelism back into the tests Running tests in parallel can help speed up overall test execution. 
Go blocks parent tests while child tests run, so it does not fully fan out as you might expect. It is noticably faster, though. Running 4 or more concurrent processes knocks over a minute off the total execution time. --- internal/cloud/e2e/apply_auto_approve_test.go | 19 +++++----- .../e2e/backend_apply_before_init_test.go | 20 +++++----- internal/cloud/e2e/helper_test.go | 2 +- .../cloud/e2e/init_with_empty_tags_test.go | 20 +++++----- .../e2e/migrate_state_multi_to_tfc_test.go | 38 ++++++++++--------- ...igrate_state_remote_backend_to_tfc_test.go | 9 +++++ .../e2e/migrate_state_single_to_tfc_test.go | 19 +++++----- .../e2e/migrate_state_tfc_to_other_test.go | 20 +++++----- .../e2e/migrate_state_tfc_to_tfc_test.go | 35 ++++++++++------- internal/cloud/e2e/run_variables_test.go | 17 +++++---- 10 files changed, 112 insertions(+), 87 deletions(-) diff --git a/internal/cloud/e2e/apply_auto_approve_test.go b/internal/cloud/e2e/apply_auto_approve_test.go index c31a1ad2f..45afeaeb7 100644 --- a/internal/cloud/e2e/apply_auto_approve_test.go +++ b/internal/cloud/e2e/apply_auto_approve_test.go @@ -13,6 +13,7 @@ import ( ) func Test_terraform_apply_autoApprove(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -180,20 +181,20 @@ func Test_terraform_apply_autoApprove(t *testing.T) { }, } for name, tc := range cases { - tc := tc - t.Run(name, func(t *testing.T) { - // t.Parallel() + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) 
if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -211,13 +212,13 @@ func Test_terraform_apply_autoApprove(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -233,7 +234,7 @@ func Test_terraform_apply_autoApprove(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -241,7 +242,7 @@ func Test_terraform_apply_autoApprove(t *testing.T) { err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err) + subtest.Fatal(err) } } } diff --git a/internal/cloud/e2e/backend_apply_before_init_test.go b/internal/cloud/e2e/backend_apply_before_init_test.go index 9b4ff5508..b44354b91 100644 --- a/internal/cloud/e2e/backend_apply_before_init_test.go +++ b/internal/cloud/e2e/backend_apply_before_init_test.go @@ -10,8 +10,8 @@ import ( ) func Test_backend_apply_before_init(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) - // t.Parallel() skipWithoutRemoteTerraformVersion(t) cases := map[string]struct { @@ -72,20 +72,20 @@ func Test_backend_apply_before_init(t *testing.T) { } for name, tc := range cases { - tc := tc - t.Run(name, func(t *testing.T) { - // t.Parallel() + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) 
if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -103,13 +103,13 @@ func Test_backend_apply_before_init(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -125,7 +125,7 @@ func Test_backend_apply_before_init(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -133,7 +133,7 @@ func Test_backend_apply_before_init(t *testing.T) { err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err) + subtest.Fatal(err) } } } diff --git a/internal/cloud/e2e/helper_test.go b/internal/cloud/e2e/helper_test.go index 6c65ba983..9abfa9c30 100644 --- a/internal/cloud/e2e/helper_test.go +++ b/internal/cloud/e2e/helper_test.go @@ -15,7 +15,7 @@ import ( ) const ( - expectConsoleTimeout = 15 * time.Second + expectConsoleTimeout = 60 * time.Second * 3 ) type tfCommand struct { diff --git a/internal/cloud/e2e/init_with_empty_tags_test.go b/internal/cloud/e2e/init_with_empty_tags_test.go index c608e0973..5683363b8 100644 --- a/internal/cloud/e2e/init_with_empty_tags_test.go +++ b/internal/cloud/e2e/init_with_empty_tags_test.go @@ -10,8 +10,8 @@ import ( ) func Test_init_with_empty_tags(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) - // t.Parallel() skipWithoutRemoteTerraformVersion(t) cases := map[string]struct { @@ -39,20 +39,20 @@ func Test_init_with_empty_tags(t *testing.T) { } for name, tc := range cases { - tc := tc - t.Run(name, func(t 
*testing.T) { - // t.Parallel() + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -70,13 +70,13 @@ func Test_init_with_empty_tags(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -92,14 +92,14 @@ func Test_init_with_empty_tags(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } } err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err) + subtest.Fatal(err) } } } diff --git a/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go b/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go index 74b750f2c..943da4ccd 100644 --- a/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go @@ -13,6 +13,7 @@ import ( ) func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -225,20 +226,20 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { } for name, tc := range cases { - tc := tc - t.Run(name, func(t *testing.T) { - // t.Parallel() + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := 
createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -256,13 +257,13 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -278,7 +279,7 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -286,7 +287,7 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -299,6 +300,7 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { } func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -513,20 +515,20 @@ func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { } for name, tc := range cases { - tc := tc - t.Run(name, func(t *testing.T) { - // t.Parallel() + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) 
if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -544,13 +546,13 @@ func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -569,7 +571,7 @@ func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { } _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -577,7 +579,7 @@ func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { err = cmd.Wait() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } diff --git a/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go b/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go index 19d98ee63..831a716a0 100644 --- a/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go @@ -12,6 +12,7 @@ import ( ) func Test_migrate_remote_backend_name_to_tfc_name(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -136,8 +137,10 @@ func Test_migrate_remote_backend_name_to_tfc_name(t *testing.T) { } func Test_migrate_remote_backend_name_to_tfc_same_name(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) + ctx := context.Background() operations := []operationSets{ { @@ -259,6 +262,7 @@ func Test_migrate_remote_backend_name_to_tfc_same_name(t *testing.T) { } func Test_migrate_remote_backend_name_to_tfc_name_different_org(t 
*testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -388,6 +392,7 @@ func Test_migrate_remote_backend_name_to_tfc_name_different_org(t *testing.T) { } func Test_migrate_remote_backend_name_to_tfc_tags(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -522,6 +527,7 @@ func Test_migrate_remote_backend_name_to_tfc_tags(t *testing.T) { } func Test_migrate_remote_backend_prefix_to_tfc_name_strategy_single_workspace(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -647,6 +653,7 @@ func Test_migrate_remote_backend_prefix_to_tfc_name_strategy_single_workspace(t } func Test_migrate_remote_backend_prefix_to_tfc_name_strategy_multi_workspace(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -801,6 +808,7 @@ func Test_migrate_remote_backend_prefix_to_tfc_name_strategy_multi_workspace(t * } func Test_migrate_remote_backend_prefix_to_tfc_tags_strategy_single_workspace(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -927,6 +935,7 @@ func Test_migrate_remote_backend_prefix_to_tfc_tags_strategy_single_workspace(t } func Test_migrate_remote_backend_prefix_to_tfc_tags_strategy_multi_workspace(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) diff --git a/internal/cloud/e2e/migrate_state_single_to_tfc_test.go b/internal/cloud/e2e/migrate_state_single_to_tfc_test.go index 064256aac..edf0c0376 100644 --- a/internal/cloud/e2e/migrate_state_single_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_single_to_tfc_test.go @@ -12,6 +12,7 @@ import ( ) func Test_migrate_single_to_tfc(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -129,20 +130,20 @@ func Test_migrate_single_to_tfc(t *testing.T) { } for name, tc := range cases { - tc := tc - t.Run(name, func(t *testing.T) { - // t.Parallel() 
+ tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -160,13 +161,13 @@ func Test_migrate_single_to_tfc(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -182,7 +183,7 @@ func Test_migrate_single_to_tfc(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -190,7 +191,7 @@ func Test_migrate_single_to_tfc(t *testing.T) { err = cmd.Wait() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } diff --git a/internal/cloud/e2e/migrate_state_tfc_to_other_test.go b/internal/cloud/e2e/migrate_state_tfc_to_other_test.go index 4840a0f85..3e0be6894 100644 --- a/internal/cloud/e2e/migrate_state_tfc_to_other_test.go +++ b/internal/cloud/e2e/migrate_state_tfc_to_other_test.go @@ -10,7 +10,9 @@ import ( ) func Test_migrate_tfc_to_other(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) + cases := map[string]struct { operations []operationSets }{ @@ -47,20 +49,20 @@ func Test_migrate_tfc_to_other(t *testing.T) { } for name, tc := range cases { - tc := tc - t.Run(name, func(t *testing.T) { - // t.Parallel() + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := 
createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -78,13 +80,13 @@ func Test_migrate_tfc_to_other(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -100,14 +102,14 @@ func Test_migrate_tfc_to_other(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } } err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err) + subtest.Fatal(err) } } } diff --git a/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go b/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go index f12dca418..123534a95 100644 --- a/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go @@ -13,8 +13,10 @@ import ( ) func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) + ctx := context.Background() cases := map[string]struct { @@ -219,16 +221,18 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { } for name, tc := range cases { - t.Run(name, func(t *testing.T) { + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() exp, err := expect.NewConsole(defaultOpts()...) 
if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -248,13 +252,13 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -270,7 +274,7 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -278,7 +282,7 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err.Error()) + subtest.Fatal(err.Error()) } } } @@ -291,6 +295,7 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { } func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -464,16 +469,18 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { } for name, tc := range cases { - t.Run(name, func(t *testing.T) { + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() exp, err := expect.NewConsole(defaultOpts()...) 
if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -493,13 +500,13 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -515,7 +522,7 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatal(err) + subtest.Fatal(err) } } } @@ -523,7 +530,7 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { err = cmd.Wait() if err != nil { - t.Fatal(err.Error()) + subtest.Fatal(err.Error()) } } } diff --git a/internal/cloud/e2e/run_variables_test.go b/internal/cloud/e2e/run_variables_test.go index 77cc1514d..54fcb38a6 100644 --- a/internal/cloud/e2e/run_variables_test.go +++ b/internal/cloud/e2e/run_variables_test.go @@ -45,6 +45,7 @@ output "test_env" { } func Test_cloud_run_variables(t *testing.T) { + t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) @@ -81,18 +82,20 @@ func Test_cloud_run_variables(t *testing.T) { } for name, tc := range cases { - t.Run(name, func(t *testing.T) { + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() organization, cleanup := createOrganization(t) defer cleanup() exp, err := expect.NewConsole(defaultOpts()...) 
if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer exp.Close() tmpDir, err := ioutil.TempDir("", "terraform-test") if err != nil { - t.Fatal(err) + subtest.Fatal(err) } defer os.RemoveAll(tmpDir) @@ -112,13 +115,13 @@ func Test_cloud_run_variables(t *testing.T) { err = cmd.Start() if err != nil { - t.Fatal(err) + subtest.Fatal(err) } if tfCmd.expectedCmdOutput != "" { got, err := exp.ExpectString(tfCmd.expectedCmdOutput) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) } } @@ -134,7 +137,7 @@ func Test_cloud_run_variables(t *testing.T) { output := tfCmd.postInputOutput[i] _, err := exp.ExpectString(output) if err != nil { - t.Fatalf(`Expected command output "%s", but got %v `, tfCmd.expectedCmdOutput, err) + subtest.Fatalf(`Expected command output "%s", but got %v `, tfCmd.expectedCmdOutput, err) } } } @@ -142,7 +145,7 @@ func Test_cloud_run_variables(t *testing.T) { err = cmd.Wait() if err != nil && !tfCmd.expectError { - t.Fatal(err) + subtest.Fatal(err) } } From 768741c0f71f171f9fc873601dbb0ea77bab554d Mon Sep 17 00:00:00 2001 From: Alisdair McDiarmid Date: Fri, 17 Dec 2021 17:46:42 -0500 Subject: [PATCH 04/68] command/show: Disable plan state lineage checks When showing a saved plan, we do not need to check the state lineage against current state, because the plan cannot be applied. This is relevant when plan and apply specify a `-state` argument to choose a non-default state file. In this case, the stored prior state in the plan will not match the default state file, so a lineage check will always error. 
--- internal/backend/backend.go | 7 ++++ internal/backend/local/backend_local.go | 2 +- internal/command/show.go | 1 + internal/command/show_test.go | 48 +++++++++++++++++++++++++ 4 files changed, 57 insertions(+), 1 deletion(-) diff --git a/internal/backend/backend.go b/internal/backend/backend.go index 4124b2abd..0e1daef40 100644 --- a/internal/backend/backend.go +++ b/internal/backend/backend.go @@ -275,6 +275,13 @@ type Operation struct { // the variables set in the plan are used instead, and they must be valid. AllowUnsetVariables bool + // When loading a plan file for a read-only operation, we may want to + // disable the state lineage checks which are only relevant for operations + // which can modify state. An example where this is important is showing + // a plan which was prepared against a non-default state file, because the + // lineage checks are always against the default state. + DisablePlanFileStateLineageChecks bool + // View implements the logic for all UI interactions. View views.Operation diff --git a/internal/backend/local/backend_local.go b/internal/backend/local/backend_local.go index 6082bfdf6..a4a4fb67e 100644 --- a/internal/backend/local/backend_local.go +++ b/internal/backend/local/backend_local.go @@ -284,7 +284,7 @@ func (b *Local) localRunForPlanFile(op *backend.Operation, pf *planfile.Reader, )) return nil, snap, diags } - if currentStateMeta != nil { + if !op.DisablePlanFileStateLineageChecks && currentStateMeta != nil { // If the caller sets this, we require that the stored prior state // has the same metadata, which is an extra safety check that nothing // has changed since the plan was created. 
(All of the "real-world" diff --git a/internal/command/show.go b/internal/command/show.go index 6ae66beeb..728ea9872 100644 --- a/internal/command/show.go +++ b/internal/command/show.go @@ -94,6 +94,7 @@ func (c *ShowCommand) Run(args []string) int { opReq.PlanFile = planFile opReq.ConfigLoader, err = c.initConfigLoader() opReq.AllowUnsetVariables = true + opReq.DisablePlanFileStateLineageChecks = true if err != nil { diags = diags.Append(err) c.showDiagnostics(diags) diff --git a/internal/command/show_test.go b/internal/command/show_test.go index ea266d2cb..25504d2c2 100644 --- a/internal/command/show_test.go +++ b/internal/command/show_test.go @@ -15,7 +15,9 @@ import ( "github.com/hashicorp/terraform/internal/plans" "github.com/hashicorp/terraform/internal/providers" "github.com/hashicorp/terraform/internal/states" + "github.com/hashicorp/terraform/internal/states/statemgr" "github.com/hashicorp/terraform/internal/terraform" + "github.com/hashicorp/terraform/version" "github.com/mitchellh/cli" "github.com/zclconf/go-cty/cty" ) @@ -575,6 +577,52 @@ func TestShow_json_output_state(t *testing.T) { } } +func TestShow_planWithNonDefaultStateLineage(t *testing.T) { + // Create a temporary working directory that is empty + td := tempDir(t) + testCopyDir(t, testFixturePath("show"), td) + defer os.RemoveAll(td) + defer testChdir(t, td)() + + // Write default state file with a testing lineage ("fake-for-testing") + testStateFileDefault(t, testState()) + + // Create a plan with a different lineage, which we should still be able + // to show + _, snap := testModuleWithSnapshot(t, "show") + state := testState() + plan := testPlan(t) + stateMeta := statemgr.SnapshotMeta{ + Lineage: "fake-for-plan", + Serial: 1, + TerraformVersion: version.SemVer, + } + planPath := testPlanFileMatchState(t, snap, state, plan, stateMeta) + + ui := cli.NewMockUi() + view, done := testView(t) + c := &ShowCommand{ + Meta: Meta{ + testingOverrides: metaOverridesForProvider(testProvider()), + Ui: 
ui, + View: view, + }, + } + + args := []string{ + planPath, + } + if code := c.Run(args); code != 0 { + t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + } + + want := `No changes. Your infrastructure matches the configuration.` + got := done(t).Stdout() + if !strings.Contains(got, want) { + t.Errorf("missing expected output\nwant: %s\ngot:\n%s", want, got) + } +} + // showFixtureSchema returns a schema suitable for processing the configuration // in testdata/show. This schema should be assigned to a mock provider // named "test". From d196d2870a2e77f4ec65efe2359ed0d6919b10f7 Mon Sep 17 00:00:00 2001 From: Barrett Clark Date: Thu, 16 Dec 2021 14:23:42 -0600 Subject: [PATCH 05/68] Refactor cloud table test runs As the cloud e2e tests evolved some common patters became apparent. This standardizes and consolidates the patterns into a common test runner that takes the table tests and runs them in parallel. Some tests also needed to be converted to utilize table tests. --- internal/cloud/e2e/apply_auto_approve_test.go | 81 +- .../e2e/backend_apply_before_init_test.go | 78 +- internal/cloud/e2e/helper_test.go | 4 +- .../cloud/e2e/init_with_empty_tags_test.go | 77 +- internal/cloud/e2e/main_test.go | 92 + .../e2e/migrate_state_multi_to_tfc_test.go | 158 +- ...igrate_state_remote_backend_to_tfc_test.go | 1531 ++++++----------- .../e2e/migrate_state_single_to_tfc_test.go | 82 +- .../e2e/migrate_state_tfc_to_other_test.go | 77 +- .../e2e/migrate_state_tfc_to_tfc_test.go | 182 +- internal/cloud/e2e/run_variables_test.go | 79 +- 11 files changed, 594 insertions(+), 1847 deletions(-) diff --git a/internal/cloud/e2e/apply_auto_approve_test.go b/internal/cloud/e2e/apply_auto_approve_test.go index 45afeaeb7..816e70035 100644 --- a/internal/cloud/e2e/apply_auto_approve_test.go +++ b/internal/cloud/e2e/apply_auto_approve_test.go @@ -2,13 +2,9 @@ package main import ( "context" - "io/ioutil" - "os" "testing" - expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" - 
"github.com/hashicorp/terraform/internal/e2e" tfversion "github.com/hashicorp/terraform/version" ) @@ -19,10 +15,7 @@ func Test_terraform_apply_autoApprove(t *testing.T) { ctx := context.Background() - cases := map[string]struct { - operations []operationSets - validations func(t *testing.T, orgName string) - }{ + cases := testCases{ "workspace manual apply, terraform apply without auto-approve, expect prompt": { operations: []operationSets{ { @@ -180,76 +173,6 @@ func Test_terraform_apply_autoApprove(t *testing.T) { }, }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err) - } - } - } - - if tc.validations != nil { - tc.validations(t, organization.Name) - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/backend_apply_before_init_test.go b/internal/cloud/e2e/backend_apply_before_init_test.go index b44354b91..390e54a42 100644 --- a/internal/cloud/e2e/backend_apply_before_init_test.go +++ b/internal/cloud/e2e/backend_apply_before_init_test.go @@ -1,12 +1,7 @@ package main import ( - "io/ioutil" - "os" "testing" - - expect "github.com/Netflix/go-expect" - "github.com/hashicorp/terraform/internal/e2e" ) func Test_backend_apply_before_init(t *testing.T) { @@ -14,9 +9,7 @@ func Test_backend_apply_before_init(t *testing.T) { skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) - cases := map[string]struct { - operations []operationSets - }{ + cases := testCases{ "terraform apply with cloud block - blank state": { operations: []operationSets{ { @@ -71,72 +64,5 @@ func Test_backend_apply_before_init(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err) - } - } - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/helper_test.go b/internal/cloud/e2e/helper_test.go index 9abfa9c30..eb464cdb1 100644 --- a/internal/cloud/e2e/helper_test.go +++ b/internal/cloud/e2e/helper_test.go @@ -15,7 +15,9 @@ import ( ) const ( - expectConsoleTimeout = 60 * time.Second * 3 + // We need to give the console enough time to hear back. + // 1 minute was too short in some cases, so this gives it ample time. 
+ expectConsoleTimeout = 3 * time.Minute ) type tfCommand struct { diff --git a/internal/cloud/e2e/init_with_empty_tags_test.go b/internal/cloud/e2e/init_with_empty_tags_test.go index 5683363b8..016aad50c 100644 --- a/internal/cloud/e2e/init_with_empty_tags_test.go +++ b/internal/cloud/e2e/init_with_empty_tags_test.go @@ -1,12 +1,7 @@ package main import ( - "io/ioutil" - "os" "testing" - - expect "github.com/Netflix/go-expect" - "github.com/hashicorp/terraform/internal/e2e" ) func Test_init_with_empty_tags(t *testing.T) { @@ -14,9 +9,7 @@ func Test_init_with_empty_tags(t *testing.T) { skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) - cases := map[string]struct { - operations []operationSets - }{ + cases := testCases{ "terraform init with cloud block - no tagged workspaces exist yet": { operations: []operationSets{ { @@ -38,71 +31,5 @@ func Test_init_with_empty_tags(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err) - } - } - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/main_test.go b/internal/cloud/e2e/main_test.go index d758eb65d..4bfe61429 100644 --- a/internal/cloud/e2e/main_test.go +++ b/internal/cloud/e2e/main_test.go @@ -10,7 +10,9 @@ import ( "strings" "testing" + expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" + "github.com/hashicorp/terraform/internal/e2e" tfversion "github.com/hashicorp/terraform/version" ) @@ -66,6 +68,96 @@ func setup() func() { teardown() } } +func testRunner(t *testing.T, cases testCases, orgCount int, tfEnvFlags ...string) { + for name, tc := range cases { + tc := tc // rebind tc into this lexical scope + t.Run(name, func(subtest *testing.T) { + subtest.Parallel() + + orgNames := []string{} + for i := 0; i < orgCount; i++ { + organization, cleanup := createOrganization(t) + t.Cleanup(cleanup) + orgNames = append(orgNames, organization.Name) + } + + exp, err := expect.NewConsole(defaultOpts()...) 
+ if err != nil { + subtest.Fatal(err) + } + defer exp.Close() + + tmpDir, err := ioutil.TempDir("", "terraform-test") + if err != nil { + subtest.Fatal(err) + } + defer os.RemoveAll(tmpDir) + + tf := e2e.NewBinary(terraformBin, tmpDir) + tfEnvFlags = append(tfEnvFlags, "TF_LOG=INFO") + tfEnvFlags = append(tfEnvFlags, cliConfigFileEnv) + for _, env := range tfEnvFlags { + tf.AddEnv(env) + } + defer tf.Close() + + var orgName string + for index, op := range tc.operations { + if orgCount == 1 { + orgName = orgNames[0] + } else { + orgName = orgNames[index] + } + op.prep(t, orgName, tf.WorkDir()) + for _, tfCmd := range op.commands { + cmd := tf.Cmd(tfCmd.command...) + cmd.Stdin = exp.Tty() + cmd.Stdout = exp.Tty() + cmd.Stderr = exp.Tty() + + err = cmd.Start() + if err != nil { + subtest.Fatal(err) + } + + if tfCmd.expectedCmdOutput != "" { + got, err := exp.ExpectString(tfCmd.expectedCmdOutput) + if err != nil { + subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + } + } + + lenInput := len(tfCmd.userInput) + lenInputOutput := len(tfCmd.postInputOutput) + if lenInput > 0 { + for i := 0; i < lenInput; i++ { + input := tfCmd.userInput[i] + exp.SendLine(input) + // use the index to find the corresponding + // output that matches the input. 
+ if lenInputOutput-1 >= i { + output := tfCmd.postInputOutput[i] + _, err := exp.ExpectString(output) + if err != nil { + subtest.Fatal(err) + } + } + } + } + + err = cmd.Wait() + if err != nil && !tfCmd.expectError { + subtest.Fatal(err) + } + } + } + + if tc.validations != nil { + tc.validations(t, orgName) + } + }) + } +} func setTfeClient() { tfeHostname = os.Getenv("TFE_HOSTNAME") diff --git a/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go b/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go index 943da4ccd..b2f4ed0c5 100644 --- a/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_multi_to_tfc_test.go @@ -2,13 +2,9 @@ package main import ( "context" - "io/ioutil" - "os" "testing" - expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" - "github.com/hashicorp/terraform/internal/e2e" tfversion "github.com/hashicorp/terraform/version" ) @@ -19,10 +15,7 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { ctx := context.Background() - cases := map[string]struct { - operations []operationSets - validations func(t *testing.T, orgName string) - }{ + cases := testCases{ "migrating multiple workspaces to cloud using name strategy; current workspace is 'default'": { operations: []operationSets{ { @@ -225,78 +218,7 @@ func Test_migrate_multi_to_tfc_cloud_name_strategy(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) 
- if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - defer tf.Close() - tf.AddEnv(cliConfigFileEnv) - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err) - } - } - } - - if tc.validations != nil { - tc.validations(t, organization.Name) - } - }) - } + testRunner(t, cases, 1) } func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { @@ -514,79 +436,5 @@ func Test_migrate_multi_to_tfc_cloud_tags_strategy(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) 
- if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - defer tf.Close() - tf.AddEnv(cliConfigFileEnv) - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - if output == "" { - continue - } - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - subtest.Fatal(err) - } - } - } - - if tc.validations != nil { - tc.validations(t, organization.Name) - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go b/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go index 831a716a0..2ae9345e2 100644 --- a/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_remote_backend_to_tfc_test.go @@ -2,1091 +2,522 @@ package main import ( "context" - "io/ioutil" - "os" "testing" - expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" - "github.com/hashicorp/terraform/internal/e2e" ) -func Test_migrate_remote_backend_name_to_tfc_name(t *testing.T) { +func Test_migrate_remote_backend_single_org(t *testing.T) { t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - remoteWorkspace := "remote-workspace" - tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ + cases := testCases{ + "migrate remote backend name to tfc name": { + operations: []operationSets{ { - command: []string{"init"}, - expectedCmdOutput: `Successfully configured the backend "remote"!`, + prep: func(t *testing.T, orgName, dir string) { + remoteWorkspace := "remote-workspace" + tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `Successfully configured the backend "remote"!`, + }, + { + command: []string{"apply", "-auto-approve"}, + expectedCmdOutput: `Apply 
complete!`, + }, + }, }, { - command: []string{"apply", "-auto-approve"}, - expectedCmdOutput: `Apply complete!`, + prep: func(t *testing.T, orgName, dir string) { + wsName := "cloud-workspace" + tfBlock := terraformConfigCloudBackendName(orgName, wsName) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, + userInput: []string{"yes", "yes"}, + postInputOutput: []string{ + `Should Terraform migrate your existing state?`, + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `cloud-workspace`, + }, + }, }, }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - wsName := "cloud-workspace" - tfBlock := terraformConfigCloudBackendName(orgName, wsName) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, - userInput: []string{"yes", "yes"}, - postInputOutput: []string{ - `Should Terraform migrate your existing state?`, - `Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `cloud-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - expectedName := "cloud-workspace" - ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) - if err != nil { - t.Fatal(err) - } - if ws == nil { - t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) - } - } - - exp, err := expect.NewConsole(defaultOpts()...) 
- if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) + validations: func(t *testing.T, orgName string) { + expectedName := "cloud-workspace" + ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + t.Fatal(err) } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } + if ws == nil { + t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } + }, + }, + "migrate remote backend name to tfc same name": { + operations: []operationSets{ + { + prep: func(t *testing.T, orgName, dir string) { + remoteWorkspace := "remote-workspace" + tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `Successfully configured the backend "remote"!`, + }, + { + command: []string{"apply", "-auto-approve"}, + postInputOutput: []string{`Apply complete!`}, + }, + }, + }, + { + prep: func(t *testing.T, orgName, dir string) { + wsName := "remote-workspace" + tfBlock := terraformConfigCloudBackendName(orgName, wsName) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, + userInput: []string{"yes", "yes"}, + postInputOutput: []string{ + `Should Terraform migrate your existing state?`, + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `remote-workspace`, + }, + }, + }, + }, + validations: func(t *testing.T, orgName string) { + expectedName := "remote-workspace" + ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) + if err != nil { + t.Fatal(err) + } + if ws == nil { + t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) + } + }, + }, + "migrate remote backend name to tfc tags": { + operations: []operationSets{ + { + prep: func(t *testing.T, orgName, dir string) { + remoteWorkspace := "remote-workspace" + tfBlock := 
terraformConfigRemoteBackendName(orgName, remoteWorkspace) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `Successfully configured the backend "remote"!`, + }, + { + command: []string{"apply", "-auto-approve"}, + postInputOutput: []string{`Apply complete!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `default`, + }, + }, + }, + { + prep: func(t *testing.T, orgName, dir string) { + tag := "app" + tfBlock := terraformConfigCloudBackendTags(orgName, tag) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, + userInput: []string{"yes", "cloud-workspace", "yes"}, + postInputOutput: []string{ + `Should Terraform migrate your existing state?`, + `Terraform Cloud requires all workspaces to be given an explicit name.`, + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `cloud-workspace`, + }, + }, + }, + }, + validations: func(t *testing.T, orgName string) { + wsList, err := tfeClient.Workspaces.List(ctx, orgName, tfe.WorkspaceListOptions{ + Tags: tfe.String("app"), + }) + if err != nil { + t.Fatal(err) + } + if len(wsList.Items) != 1 { + t.Fatalf("Expected number of workspaces to be 1, but got %d", len(wsList.Items)) + } + ws := wsList.Items[0] + if ws.Name != "cloud-workspace" { + t.Fatalf("Expected workspace to be `cloud-workspace`, but is %s", ws.Name) + } + }, + }, + "migrate remote backend prefix to tfc name strategy single workspace": { + operations: []operationSets{ + { + prep: func(t *testing.T, orgName, dir string) { + _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) + prefix := "app-" + tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { 
+ command: []string{"init"}, + expectedCmdOutput: `Terraform has been successfully initialized!`, + }, + { + command: []string{"apply", "-auto-approve"}, + postInputOutput: []string{`Apply complete!`}, + }, + }, + }, + { + prep: func(t *testing.T, orgName, dir string) { + wsName := "cloud-workspace" + tfBlock := terraformConfigCloudBackendName(orgName, wsName) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, + userInput: []string{"yes", "yes"}, + postInputOutput: []string{ + `Should Terraform migrate your existing state?`, + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `cloud-workspace`, + }, + }, + }, + }, + validations: func(t *testing.T, orgName string) { + expectedName := "cloud-workspace" + ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) + if err != nil { + t.Fatal(err) + } + if ws == nil { + t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) + } + }, + }, + "migrate remote backend prefix to tfc name strategy multi workspace": { + operations: []operationSets{ + { + prep: func(t *testing.T, orgName, dir string) { + _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) + _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-two")}) + prefix := "app-" + tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `The currently selected workspace (default) does not exist.`, + userInput: []string{"1"}, + postInputOutput: []string{`Terraform has been successfully initialized!`}, + }, + { + command: []string{"apply", "-auto-approve"}, + postInputOutput: []string{`Apply complete!`}, + }, + { + command: []string{"workspace", 
"list"}, + expectedCmdOutput: "* one", // app name retrieved via prefix + }, + { + command: []string{"workspace", "select", "two"}, + expectedCmdOutput: `Switched to workspace "two".`, // app name retrieved via prefix + }, + }, + }, + { + prep: func(t *testing.T, orgName, dir string) { + wsName := "cloud-workspace" + tfBlock := terraformConfigCloudBackendName(orgName, wsName) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Do you want to copy only your current workspace?`, + userInput: []string{"yes"}, + postInputOutput: []string{ + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `cloud-workspace`, + }, + }, + }, + }, + validations: func(t *testing.T, orgName string) { + expectedName := "cloud-workspace" + ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) + if err != nil { + t.Fatal(err) + } + if ws == nil { + t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) + } + wsList, err := tfeClient.Workspaces.List(ctx, orgName, tfe.WorkspaceListOptions{}) + if err != nil { + t.Fatal(err) + } + if len(wsList.Items) != 3 { + t.Fatalf("expected number of workspaces in this org to be 3, but got %d", len(wsList.Items)) + } + _, empty := getWorkspace(wsList.Items, "cloud-workspace") + if empty { + t.Fatalf("expected workspaces to include 'cloud-workspace' but didn't.") + } + _, empty = getWorkspace(wsList.Items, "app-one") + if empty { + t.Fatalf("expected workspaces to include 'app-one' but didn't.") + } + _, empty = getWorkspace(wsList.Items, "app-two") + if empty { + t.Fatalf("expected workspaces to include 'app-two' but didn't.") + } + }, + }, + "migrate remote backend prefix to tfc tags strategy single workspace": { + operations: []operationSets{ + { + prep: func(t *testing.T, orgName, dir string) { + _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: 
tfe.String("app-one")}) + prefix := "app-" + tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `Terraform has been successfully initialized!`, + }, + { + command: []string{"apply", "-auto-approve"}, + postInputOutput: []string{`Apply complete!`}, + }, + }, + }, + { + prep: func(t *testing.T, orgName, dir string) { + tag := "app" + tfBlock := terraformConfigCloudBackendTags(orgName, tag) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, + userInput: []string{"yes", "cloud-workspace", "yes"}, + postInputOutput: []string{ + `Should Terraform migrate your existing state?`, + `Terraform Cloud requires all workspaces to be given an explicit name.`, + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "list"}, + expectedCmdOutput: `cloud-workspace`, + }, + }, + }, + }, + validations: func(t *testing.T, orgName string) { + expectedName := "cloud-workspace" + ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) + if err != nil { + t.Fatal(err) + } + if ws == nil { + t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) + } + }, + }, + "migrate remote backend prefix to tfc tags strategy multi workspace": { + operations: []operationSets{ + { + prep: func(t *testing.T, orgName, dir string) { + _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) + _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-two")}) + prefix := "app-" + tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `The currently selected workspace (default) does not exist.`, + 
userInput: []string{"1"}, + postInputOutput: []string{`Terraform has been successfully initialized!`}, + }, + { + command: []string{"apply"}, + expectedCmdOutput: `Do you want to perform these actions in workspace "app-one"?`, + userInput: []string{"yes"}, + postInputOutput: []string{`Apply complete!`}, + }, + { + command: []string{"workspace", "select", "two"}, + }, + { + command: []string{"apply"}, + expectedCmdOutput: `Do you want to perform these actions in workspace "app-two"?`, + userInput: []string{"yes"}, + postInputOutput: []string{`Apply complete!`}, + }, + }, + }, + { + prep: func(t *testing.T, orgName, dir string) { + tag := "app" + tfBlock := terraformConfigCloudBackendTags(orgName, tag) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Do you wish to proceed?`, + userInput: []string{"yes"}, + postInputOutput: []string{`Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: "app-two", + }, + { + command: []string{"workspace", "select", "app-one"}, + expectedCmdOutput: `Switched to workspace "app-one".`, + }, + }, + }, + }, + validations: func(t *testing.T, orgName string) { + wsList, err := tfeClient.Workspaces.List(ctx, orgName, tfe.WorkspaceListOptions{ + Tags: tfe.String("app"), + }) + if err != nil { + t.Fatal(err) + } + if len(wsList.Items) != 2 { + t.Logf("Expected the number of workspaces to be 2, but got %d", len(wsList.Items)) + } + ws, empty := getWorkspace(wsList.Items, "app-one") + if empty { + t.Fatalf("expected workspaces to include 'app-one' but didn't.") + } + if len(ws.TagNames) == 0 { + t.Fatalf("expected workspaces 'one' to have tags.") + } + ws, empty = getWorkspace(wsList.Items, "app-two") + if empty { + t.Fatalf("expected workspaces to include 'app-two' but didn't.") + } + if len(ws.TagNames) == 0 { + t.Fatalf("expected workspaces 'app-two' to have tags.") + } + }, + }, 
} - if validations != nil { - validations(t, organization.Name) - } + testRunner(t, cases, 1) } -func Test_migrate_remote_backend_name_to_tfc_same_name(t *testing.T) { +func Test_migrate_remote_backend_multi_org(t *testing.T) { t.Parallel() skipIfMissingEnvVar(t) skipWithoutRemoteTerraformVersion(t) ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - remoteWorkspace := "remote-workspace" - tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ + cases := testCases{ + "migrate remote backend name to tfc name": { + operations: []operationSets{ { - command: []string{"init"}, - expectedCmdOutput: `Successfully configured the backend "remote"!`, + prep: func(t *testing.T, orgName, dir string) { + remoteWorkspace := "remote-workspace" + tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init"}, + expectedCmdOutput: `Successfully configured the backend "remote"!`, + }, + { + command: []string{"apply", "-auto-approve"}, + postInputOutput: []string{`Apply complete!`}, + }, + }, }, { - command: []string{"apply", "-auto-approve"}, - postInputOutput: []string{`Apply complete!`}, + prep: func(t *testing.T, orgName, dir string) { + wsName := "remote-workspace" + tfBlock := terraformConfigCloudBackendName(orgName, wsName) + writeMainTF(t, tfBlock, dir) + }, + commands: []tfCommand{ + { + command: []string{"init", "-ignore-remote-version"}, + expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, + userInput: []string{"yes", "yes"}, + postInputOutput: []string{ + `Should Terraform migrate your existing state?`, + `Terraform Cloud has been successfully initialized!`}, + }, + { + command: []string{"workspace", "show"}, + expectedCmdOutput: `remote-workspace`, + }, + }, }, }, - }, - { - prep: func(t *testing.T, orgName, dir 
string) { - wsName := "remote-workspace" - tfBlock := terraformConfigCloudBackendName(orgName, wsName) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, - userInput: []string{"yes", "yes"}, - postInputOutput: []string{ - `Should Terraform migrate your existing state?`, - `Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `remote-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - expectedName := "remote-workspace" - ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) - if err != nil { - t.Fatal(err) - } - if ws == nil { - t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) + validations: func(t *testing.T, orgName string) { + expectedName := "remote-workspace" + ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) + t.Fatal(err) } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } + if ws == nil { + t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } + }, + }, } - if validations != nil { - validations(t, organization.Name) - } -} - -func Test_migrate_remote_backend_name_to_tfc_name_different_org(t *testing.T) { - t.Parallel() - skipIfMissingEnvVar(t) - skipWithoutRemoteTerraformVersion(t) - - ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - remoteWorkspace := "remote-workspace" - tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init"}, - expectedCmdOutput: `Successfully configured the backend "remote"!`, - }, - { - command: []string{"apply", "-auto-approve"}, - postInputOutput: []string{`Apply complete!`}, - }, - }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - wsName := "remote-workspace" - tfBlock := 
terraformConfigCloudBackendName(orgName, wsName) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, - userInput: []string{"yes", "yes"}, - postInputOutput: []string{ - `Should Terraform migrate your existing state?`, - `Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `remote-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - expectedName := "remote-workspace" - ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) - if err != nil { - t.Fatal(err) - } - if ws == nil { - t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - orgOne, cleanupOne := createOrganization(t) - orgTwo, cleanupTwo := createOrganization(t) - defer cleanupOne() - defer cleanupTwo() - orgs := []string{orgOne.Name, orgTwo.Name} - var orgName string - for index, op := range operations { - orgName = orgs[index] - op.prep(t, orgName, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } - } - - if validations != nil { - validations(t, orgName) - } -} - -func Test_migrate_remote_backend_name_to_tfc_tags(t *testing.T) { - t.Parallel() - skipIfMissingEnvVar(t) - skipWithoutRemoteTerraformVersion(t) - - ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - remoteWorkspace := "remote-workspace" - tfBlock := terraformConfigRemoteBackendName(orgName, remoteWorkspace) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init"}, - expectedCmdOutput: `Successfully configured the backend "remote"!`, - }, - { - command: []string{"apply", "-auto-approve"}, - postInputOutput: []string{`Apply complete!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `default`, - }, - }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - tag := "app" - tfBlock := terraformConfigCloudBackendTags(orgName, tag) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Migrating from backend "remote" to Terraform Cloud.`, - userInput: 
[]string{"yes", "cloud-workspace", "yes"}, - postInputOutput: []string{ - `Should Terraform migrate your existing state?`, - `Terraform Cloud requires all workspaces to be given an explicit name.`, - `Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `cloud-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - wsList, err := tfeClient.Workspaces.List(ctx, orgName, tfe.WorkspaceListOptions{ - Tags: tfe.String("app"), - }) - if err != nil { - t.Fatal(err) - } - if len(wsList.Items) != 1 { - t.Fatalf("Expected number of workspaces to be 1, but got %d", len(wsList.Items)) - } - ws := wsList.Items[0] - if ws.Name != "cloud-workspace" { - t.Fatalf("Expected workspace to be `cloud-workspace`, but is %s", ws.Name) - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } - } - - if validations != nil { - validations(t, organization.Name) - } -} - -func Test_migrate_remote_backend_prefix_to_tfc_name_strategy_single_workspace(t *testing.T) { - t.Parallel() - skipIfMissingEnvVar(t) - skipWithoutRemoteTerraformVersion(t) - - ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) - prefix := "app-" - tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init"}, - expectedCmdOutput: `Terraform has been successfully initialized!`, - }, - { - command: []string{"apply", "-auto-approve"}, - postInputOutput: []string{`Apply complete!`}, - }, - }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - wsName := "cloud-workspace" - tfBlock := terraformConfigCloudBackendName(orgName, wsName) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Migrating from backend "remote" 
to Terraform Cloud.`, - userInput: []string{"yes", "yes"}, - postInputOutput: []string{ - `Should Terraform migrate your existing state?`, - `Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `cloud-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - expectedName := "cloud-workspace" - ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) - if err != nil { - t.Fatal(err) - } - if ws == nil { - t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - got, err := exp.ExpectString(output) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", output, err, got) - } - } - } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } - } - - if validations != nil { - validations(t, organization.Name) - } -} - -func Test_migrate_remote_backend_prefix_to_tfc_name_strategy_multi_workspace(t *testing.T) { - t.Parallel() - skipIfMissingEnvVar(t) - skipWithoutRemoteTerraformVersion(t) - - ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) - _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-two")}) - prefix := "app-" - tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init"}, - expectedCmdOutput: `The currently selected workspace (default) does not exist.`, - userInput: []string{"1"}, - postInputOutput: []string{`Terraform has been successfully initialized!`}, - }, - { - command: []string{"apply", "-auto-approve"}, - postInputOutput: []string{`Apply complete!`}, - }, - { - command: []string{"workspace", "list"}, - expectedCmdOutput: "* one", // app name retrieved via prefix - }, - { - command: []string{"workspace", "select", "two"}, - expectedCmdOutput: `Switched to workspace "two".`, // app name retrieved via prefix - }, - }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - wsName := "cloud-workspace" - tfBlock := terraformConfigCloudBackendName(orgName, wsName) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Do you want to copy only your current workspace?`, - userInput: []string{"yes"}, - postInputOutput: []string{ - 
`Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: `cloud-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - expectedName := "cloud-workspace" - ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) - if err != nil { - t.Fatal(err) - } - if ws == nil { - t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) - } - wsList, err := tfeClient.Workspaces.List(ctx, orgName, tfe.WorkspaceListOptions{}) - if err != nil { - t.Fatal(err) - } - if len(wsList.Items) != 3 { - t.Fatalf("expected number of workspaces in this org to be 3, but got %d", len(wsList.Items)) - } - _, empty := getWorkspace(wsList.Items, "cloud-workspace") - if empty { - t.Fatalf("expected workspaces to include 'cloud-workspace' but didn't.") - } - _, empty = getWorkspace(wsList.Items, "app-one") - if empty { - t.Fatalf("expected workspaces to include 'app-one' but didn't.") - } - _, empty = getWorkspace(wsList.Items, "app-two") - if empty { - t.Fatalf("expected workspaces to include 'app-two' but didn't.") - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } - } - - if validations != nil { - validations(t, organization.Name) - } -} - -func Test_migrate_remote_backend_prefix_to_tfc_tags_strategy_single_workspace(t *testing.T) { - t.Parallel() - skipIfMissingEnvVar(t) - skipWithoutRemoteTerraformVersion(t) - - ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) - prefix := "app-" - tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init"}, - expectedCmdOutput: `Terraform has been successfully initialized!`, - }, - { - command: []string{"apply", "-auto-approve"}, - postInputOutput: []string{`Apply complete!`}, - }, - }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - tag := "app" - tfBlock := terraformConfigCloudBackendTags(orgName, tag) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Migrating from backend "remote" to Terraform 
Cloud.`, - userInput: []string{"yes", "cloud-workspace", "yes"}, - postInputOutput: []string{ - `Should Terraform migrate your existing state?`, - `Terraform Cloud requires all workspaces to be given an explicit name.`, - `Terraform Cloud has been successfully initialized!`}, - }, - { - command: []string{"workspace", "list"}, - expectedCmdOutput: `cloud-workspace`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - expectedName := "cloud-workspace" - ws, err := tfeClient.Workspaces.Read(ctx, orgName, expectedName) - if err != nil { - t.Fatal(err) - } - if ws == nil { - t.Fatalf("Expected workspace %s to be present, but is not.", expectedName) - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } - } - - if validations != nil { - validations(t, organization.Name) - } -} - -func Test_migrate_remote_backend_prefix_to_tfc_tags_strategy_multi_workspace(t *testing.T) { - t.Parallel() - skipIfMissingEnvVar(t) - skipWithoutRemoteTerraformVersion(t) - - ctx := context.Background() - operations := []operationSets{ - { - prep: func(t *testing.T, orgName, dir string) { - _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-one")}) - _ = createWorkspace(t, orgName, tfe.WorkspaceCreateOptions{Name: tfe.String("app-two")}) - prefix := "app-" - tfBlock := terraformConfigRemoteBackendPrefix(orgName, prefix) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init"}, - expectedCmdOutput: `The currently selected workspace (default) does not exist.`, - userInput: []string{"1"}, - postInputOutput: []string{`Terraform has been successfully initialized!`}, - }, - { - command: []string{"apply"}, - expectedCmdOutput: `Do you want to perform these actions in workspace "app-one"?`, - userInput: []string{"yes"}, - postInputOutput: []string{`Apply complete!`}, - }, - { - command: []string{"workspace", "select", "two"}, - }, - { - command: []string{"apply"}, - expectedCmdOutput: `Do you want to perform these actions in workspace "app-two"?`, - userInput: []string{"yes"}, - postInputOutput: []string{`Apply complete!`}, - }, - }, - }, - { - prep: func(t *testing.T, orgName, dir string) { - tag := "app" - tfBlock := terraformConfigCloudBackendTags(orgName, tag) - writeMainTF(t, tfBlock, dir) - }, - commands: []tfCommand{ - { - command: []string{"init", "-ignore-remote-version"}, - expectedCmdOutput: `Do you wish to proceed?`, - userInput: []string{"yes"}, - postInputOutput: []string{`Terraform Cloud has been 
successfully initialized!`}, - }, - { - command: []string{"workspace", "show"}, - expectedCmdOutput: "app-two", - }, - { - command: []string{"workspace", "select", "app-one"}, - expectedCmdOutput: `Switched to workspace "app-one".`, - }, - }, - }, - } - validations := func(t *testing.T, orgName string) { - wsList, err := tfeClient.Workspaces.List(ctx, orgName, tfe.WorkspaceListOptions{ - Tags: tfe.String("app"), - }) - if err != nil { - t.Fatal(err) - } - if len(wsList.Items) != 2 { - t.Logf("Expected the number of workspaces to be 2, but got %d", len(wsList.Items)) - } - ws, empty := getWorkspace(wsList.Items, "app-one") - if empty { - t.Fatalf("expected workspaces to include 'app-one' but didn't.") - } - if len(ws.TagNames) == 0 { - t.Fatalf("expected workspaces 'one' to have tags.") - } - ws, empty = getWorkspace(wsList.Items, "app-two") - if empty { - t.Fatalf("expected workspaces to include 'app-two' but didn't.") - } - if len(ws.TagNames) == 0 { - t.Fatalf("expected workspaces 'app-two' to have tags.") - } - } - - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - t.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - organization, cleanup := createOrganization(t) - defer cleanup() - for _, op := range operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - t.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - t.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - t.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - t.Fatal(err) - } - } - } - - if validations != nil { - validations(t, organization.Name) - } + testRunner(t, cases, 2) } diff --git a/internal/cloud/e2e/migrate_state_single_to_tfc_test.go b/internal/cloud/e2e/migrate_state_single_to_tfc_test.go index edf0c0376..ff386cc93 100644 --- a/internal/cloud/e2e/migrate_state_single_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_single_to_tfc_test.go @@ -2,13 +2,9 @@ package main import ( "context" - "io/ioutil" - "os" "testing" - expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" - "github.com/hashicorp/terraform/internal/e2e" ) func Test_migrate_single_to_tfc(t *testing.T) { @@ -18,10 +14,7 @@ func Test_migrate_single_to_tfc(t *testing.T) { ctx := context.Background() - cases := map[string]struct { - operations []operationSets - validations func(t *testing.T, orgName string) - }{ + cases := testCases{ "migrate using cloud workspace name strategy": { operations: []operationSets{ { @@ -129,76 +122,5 @@ func Test_migrate_single_to_tfc(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - 
subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - subtest.Fatal(err) - } - } - } - - if tc.validations != nil { - tc.validations(t, organization.Name) - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/migrate_state_tfc_to_other_test.go b/internal/cloud/e2e/migrate_state_tfc_to_other_test.go index 3e0be6894..4029ba5b4 100644 --- a/internal/cloud/e2e/migrate_state_tfc_to_other_test.go +++ b/internal/cloud/e2e/migrate_state_tfc_to_other_test.go @@ -1,21 +1,14 @@ package main import ( - "io/ioutil" - "os" "testing" - - expect "github.com/Netflix/go-expect" - "github.com/hashicorp/terraform/internal/e2e" ) func Test_migrate_tfc_to_other(t *testing.T) { t.Parallel() skipIfMissingEnvVar(t) - cases := map[string]struct { - operations []operationSets - }{ + cases := testCases{ "migrate from cloud to local backend": { operations: []operationSets{ { @@ -48,71 +41,5 @@ func Test_migrate_tfc_to_other(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err) - } - } - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go b/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go index 123534a95..d5c7113b9 100644 --- a/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go +++ b/internal/cloud/e2e/migrate_state_tfc_to_tfc_test.go @@ -2,13 +2,9 @@ package main import ( "context" - "io/ioutil" - "os" "testing" - expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" - "github.com/hashicorp/terraform/internal/e2e" tfversion "github.com/hashicorp/terraform/version" ) @@ -19,16 +15,8 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { ctx := context.Background() - cases := map[string]struct { - setup func(t *testing.T) (string, func()) - operations []operationSets - validations func(t *testing.T, orgName string) - }{ + cases := testCases{ "migrating from name to name": { - setup: func(t *testing.T) (string, func()) { - organization, cleanup := createOrganization(t) - return organization.Name, cleanup - }, operations: []operationSets{ { prep: func(t *testing.T, orgName, dir string) { 
@@ -93,10 +81,6 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { }, }, "migrating from name to tags": { - setup: func(t *testing.T) (string, func()) { - organization, cleanup := createOrganization(t) - return organization.Name, cleanup - }, operations: []operationSets{ { prep: func(t *testing.T, orgName, dir string) { @@ -155,10 +139,6 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { }, }, "migrating from name to tags without ignore-version flag": { - setup: func(t *testing.T) (string, func()) { - organization, cleanup := createOrganization(t) - return organization.Name, cleanup - }, operations: []operationSets{ { prep: func(t *testing.T, orgName, dir string) { @@ -220,78 +200,7 @@ func Test_migrate_tfc_to_tfc_single_workspace(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - defer tf.Close() - tf.AddEnv(cliConfigFileEnv) - - orgName, cleanup := tc.setup(t) - defer cleanup() - for _, op := range tc.operations { - op.prep(t, orgName, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err.Error()) - } - } - } - - if tc.validations != nil { - tc.validations(t, orgName) - } - }) - } + testRunner(t, cases, 1) } func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { @@ -301,16 +210,8 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { ctx := context.Background() - cases := map[string]struct { - setup func(t *testing.T) (string, func()) - operations []operationSets - validations func(t *testing.T, orgName string) - }{ + cases := testCases{ "migrating from multiple workspaces via tags to name": { - setup: func(t *testing.T) (string, func()) { - organization, cleanup := createOrganization(t) - return organization.Name, cleanup - }, operations: []operationSets{ { prep: func(t *testing.T, orgName, dir string) { @@ -392,10 +293,6 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { }, }, "migrating from multiple workspaces via tags to other tags": { - setup: func(t *testing.T) (string, func()) { - organization, cleanup := createOrganization(t) - return organization.Name, cleanup - }, operations: []operationSets{ { prep: func(t *testing.T, orgName, 
dir string) { @@ -468,76 +365,5 @@ func Test_migrate_tfc_to_tfc_multiple_workspace(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - defer tf.Close() - tf.AddEnv(cliConfigFileEnv) - - orgName, cleanup := tc.setup(t) - defer cleanup() - for _, op := range tc.operations { - op.prep(t, orgName, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) - cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. 
- if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatal(err) - } - } - } - } - - err = cmd.Wait() - if err != nil { - subtest.Fatal(err.Error()) - } - } - } - - if tc.validations != nil { - tc.validations(t, orgName) - } - }) - } + testRunner(t, cases, 1) } diff --git a/internal/cloud/e2e/run_variables_test.go b/internal/cloud/e2e/run_variables_test.go index 54fcb38a6..ee1f66eeb 100644 --- a/internal/cloud/e2e/run_variables_test.go +++ b/internal/cloud/e2e/run_variables_test.go @@ -2,13 +2,9 @@ package main import ( "fmt" - "io/ioutil" - "os" "testing" - expect "github.com/Netflix/go-expect" tfe "github.com/hashicorp/go-tfe" - "github.com/hashicorp/terraform/internal/e2e" tfversion "github.com/hashicorp/terraform/version" ) @@ -81,78 +77,5 @@ func Test_cloud_run_variables(t *testing.T) { }, } - for name, tc := range cases { - tc := tc // rebind tc into this lexical scope - t.Run(name, func(subtest *testing.T) { - subtest.Parallel() - organization, cleanup := createOrganization(t) - defer cleanup() - exp, err := expect.NewConsole(defaultOpts()...) - if err != nil { - subtest.Fatal(err) - } - defer exp.Close() - - tmpDir, err := ioutil.TempDir("", "terraform-test") - if err != nil { - subtest.Fatal(err) - } - defer os.RemoveAll(tmpDir) - - tf := e2e.NewBinary(terraformBin, tmpDir) - tf.AddEnv("TF_CLI_ARGS=-no-color") - tf.AddEnv("TF_VAR_baz=qux") - tf.AddEnv(cliConfigFileEnv) - defer tf.Close() - - for _, op := range tc.operations { - op.prep(t, organization.Name, tf.WorkDir()) - for _, tfCmd := range op.commands { - cmd := tf.Cmd(tfCmd.command...) 
- cmd.Stdin = exp.Tty() - cmd.Stdout = exp.Tty() - cmd.Stderr = exp.Tty() - - err = cmd.Start() - if err != nil { - subtest.Fatal(err) - } - - if tfCmd.expectedCmdOutput != "" { - got, err := exp.ExpectString(tfCmd.expectedCmdOutput) - if err != nil { - subtest.Fatalf("error while waiting for output\nwant: %s\nerror: %s\noutput\n%s", tfCmd.expectedCmdOutput, err, got) - } - } - - lenInput := len(tfCmd.userInput) - lenInputOutput := len(tfCmd.postInputOutput) - if lenInput > 0 { - for i := 0; i < lenInput; i++ { - input := tfCmd.userInput[i] - exp.SendLine(input) - // use the index to find the corresponding - // output that matches the input. - if lenInputOutput-1 >= i { - output := tfCmd.postInputOutput[i] - _, err := exp.ExpectString(output) - if err != nil { - subtest.Fatalf(`Expected command output "%s", but got %v `, tfCmd.expectedCmdOutput, err) - } - } - } - } - - err = cmd.Wait() - if err != nil && !tfCmd.expectError { - subtest.Fatal(err) - } - } - - if tc.validations != nil { - tc.validations(t, organization.Name) - } - } - }) - } + testRunner(t, cases, 1, "TF_CLI_ARGS=-no-color", "TF_VAR_baz=qux") } From 9b449bec995ba28f445f80c8dba1172d0b4a4661 Mon Sep 17 00:00:00 2001 From: Nick Fagerlund Date: Mon, 20 Dec 2021 21:46:39 -0800 Subject: [PATCH 06/68] Sort dependencies when encoding `ResourceInstanceObject` Resource dependencies are by nature an unordered collection, but they're persisted to state as a JSON array (in random order). This makes a mess for `terraform apply -refresh-only`, which sees the new random order as a change that requires the user to approve a state update. (As an additional problem on top of that, the user interface for refresh-only runs doesn't expect to see that as a type of change, so it says "no changes! 
would you like to update to reflect these detected changes?") This commit changes `ResourceInstanceObject.Encode()` to sort the in-memory slice of dependencies (lexically, by address) before passing it on to be compared and persisted. This appears to fix the observed UI issues with a minimum of logic changes. --- internal/states/instance_object.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/internal/states/instance_object.go b/internal/states/instance_object.go index 85ca52878..7452b4174 100644 --- a/internal/states/instance_object.go +++ b/internal/states/instance_object.go @@ -1,6 +1,8 @@ package states import ( + "sort" + "github.com/zclconf/go-cty/cty" ctyjson "github.com/zclconf/go-cty/cty/json" @@ -108,6 +110,13 @@ func (o *ResourceInstanceObject) Encode(ty cty.Type, schemaVersion uint64) (*Res return nil, err } + // Dependencies are collected and merged in an unordered format (using map + // keys as a set), then later changed to a slice (in random ordering) to be + // stored in state as an array. To avoid pointless thrashing of state in + // refresh-only runs, we can either override comparison of dependency lists + // (more desirable, but tricky for Reasons) or just sort when encoding. + sort.Slice(o.Dependencies, func(i, j int) bool { return o.Dependencies[i].String() < o.Dependencies[j].String() }) + return &ResourceInstanceObjectSrc{ SchemaVersion: schemaVersion, AttrsJSON: src, From d7ef123c127307fb24abb4a71422689b375ca9a4 Mon Sep 17 00:00:00 2001 From: Alisdair McDiarmid Date: Tue, 21 Dec 2021 14:50:47 -0500 Subject: [PATCH 07/68] refactoring: Move nested modules When applying module `moved` statements by iterating through modules in state, we previously required an exact match from the `moved` statement's `from` field and the module address. This permitted moving resources directly inside a module, but did not recur into module calls within those moved modules. 
This commit moves that exact match requirement so that it only applies to `moved` statements targeting resources. In turn this allows nested modules to be moved. --- internal/refactoring/move_execute.go | 20 +++++--- internal/refactoring/move_execute_test.go | 62 +++++++++++++++++++++++ 2 files changed, 74 insertions(+), 8 deletions(-) diff --git a/internal/refactoring/move_execute.go b/internal/refactoring/move_execute.go index 7f5fbae23..b99da1072 100644 --- a/internal/refactoring/move_execute.go +++ b/internal/refactoring/move_execute.go @@ -88,16 +88,13 @@ func ApplyMoves(stmts []MoveStatement, state *states.State) MoveResults { for _, ms := range state.Modules { modAddr := ms.Addr - if !stmt.From.SelectsModule(modAddr) { - continue - } - // We now know that the current module is relevant but what - // we'll do with it depends on the object kind. + // We don't yet know that the current module is relevant, and + // we determine that differently for each the object kind. switch kind := stmt.ObjectKind(); kind { case addrs.MoveEndpointModule: // For a module endpoint we just try the module address - // directly. + // directly, and execute the moves if it matches. if newAddr, matches := modAddr.MoveDestination(stmt.From, stmt.To); matches { log.Printf("[TRACE] refactoring.ApplyMoves: %s has moved to %s", modAddr, newAddr) @@ -125,8 +122,15 @@ func ApplyMoves(stmts []MoveStatement, state *states.State) MoveResults { continue } case addrs.MoveEndpointResource: - // For a resource endpoint we need to search each of the - // resources and resource instances in the module. + // For a resource endpoint we require an exact containing + // module match, because by definition a matching resource + // cannot be nested any deeper than that. + if !stmt.From.SelectsModule(modAddr) { + continue + } + + // We then need to search each of the resources and resource + // instances in the module. 
for _, rs := range ms.Resources { rAddr := rs.Addr if newAddr, matches := rAddr.MoveDestination(stmt.From, stmt.To); matches { diff --git a/internal/refactoring/move_execute_test.go b/internal/refactoring/move_execute_test.go index ab71f5b0f..3f568ee79 100644 --- a/internal/refactoring/move_execute_test.go +++ b/internal/refactoring/move_execute_test.go @@ -21,7 +21,10 @@ func TestApplyMoves(t *testing.T) { } moduleBoo, _ := addrs.ParseModuleInstanceStr("module.boo") + moduleBar, _ := addrs.ParseModuleInstanceStr("module.bar") moduleBarKey, _ := addrs.ParseModuleInstanceStr("module.bar[0]") + moduleBooHoo, _ := addrs.ParseModuleInstanceStr("module.boo.module.hoo") + moduleBarHoo, _ := addrs.ParseModuleInstanceStr("module.bar.module.hoo") instAddrs := map[string]addrs.AbsResourceInstance{ "foo.from": addrs.Resource{ @@ -84,6 +87,12 @@ func TestApplyMoves(t *testing.T) { Name: "to", }.Instance(addrs.IntKey(0)).Absolute(moduleBoo), + "module.bar.foo.from": addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "foo", + Name: "from", + }.Instance(addrs.NoKey).Absolute(moduleBar), + "module.bar[0].foo.from": addrs.Resource{ Mode: addrs.ManagedResourceMode, Type: "foo", @@ -113,6 +122,18 @@ func TestApplyMoves(t *testing.T) { Type: "foo", Name: "to", }.Instance(addrs.IntKey(0)).Absolute(moduleBarKey), + + "module.boo.module.hoo.foo.from": addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "foo", + Name: "from", + }.Instance(addrs.NoKey).Absolute(moduleBooHoo), + + "module.bar.module.hoo.foo.from": addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "foo", + Name: "from", + }.Instance(addrs.NoKey).Absolute(moduleBarHoo), } emptyResults := MoveResults{ @@ -289,6 +310,47 @@ func TestApplyMoves(t *testing.T) { }, }, + "module move with child module": { + []MoveStatement{ + testMoveStatement(t, "", "module.boo", "module.bar"), + }, + states.BuildState(func(s *states.SyncState) { + s.SetResourceInstanceCurrent( + instAddrs["module.boo.foo.from"], + 
&states.ResourceInstanceObjectSrc{ + Status: states.ObjectReady, + AttrsJSON: []byte(`{}`), + }, + providerAddr, + ) + s.SetResourceInstanceCurrent( + instAddrs["module.boo.module.hoo.foo.from"], + &states.ResourceInstanceObjectSrc{ + Status: states.ObjectReady, + AttrsJSON: []byte(`{}`), + }, + providerAddr, + ) + }), + MoveResults{ + Changes: map[addrs.UniqueKey]MoveSuccess{ + instAddrs["module.bar.foo.from"].UniqueKey(): { + From: instAddrs["module.boo.foo.from"], + To: instAddrs["module.bar.foo.from"], + }, + instAddrs["module.bar.module.hoo.foo.from"].UniqueKey(): { + From: instAddrs["module.boo.module.hoo.foo.from"], + To: instAddrs["module.bar.module.hoo.foo.from"], + }, + }, + Blocked: map[addrs.UniqueKey]MoveBlocked{}, + }, + []string{ + `module.bar.foo.from`, + `module.bar.module.hoo.foo.from`, + }, + }, + "move whole single module to indexed module": { []MoveStatement{ testMoveStatement(t, "", "module.boo", "module.bar[0]"), From 346418e31f9327c954b61e9354b7bf2d48c83632 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 21 Dec 2021 14:50:53 -0500 Subject: [PATCH 08/68] IsModuleMoveReIndex Add a method for checking if the From and To addresses in a move statement are only changing the indexes of modules relative to the statement module. This is needed because move statement nested within the module will be able to match against both the From and To addresses, causing cycles in the order of move operations. 
--- internal/addrs/module_instance_test.go | 6 +- internal/addrs/move_endpoint_module.go | 52 +++++++- internal/addrs/move_endpoint_module_test.go | 133 ++++++++++++++++++++ 3 files changed, 187 insertions(+), 4 deletions(-) diff --git a/internal/addrs/module_instance_test.go b/internal/addrs/module_instance_test.go index 4ad096cfc..393bcd57e 100644 --- a/internal/addrs/module_instance_test.go +++ b/internal/addrs/module_instance_test.go @@ -162,9 +162,9 @@ func TestModuleInstance_IsDeclaredByCall(t *testing.T) { } func mustParseModuleInstanceStr(str string) ModuleInstance { - mi, err := ParseModuleInstanceStr(str) - if err != nil { - panic(err) + mi, diags := ParseModuleInstanceStr(str) + if diags.HasErrors() { + panic(diags.ErrWithWarnings()) } return mi } diff --git a/internal/addrs/move_endpoint_module.go b/internal/addrs/move_endpoint_module.go index e2180f25a..7ff17621b 100644 --- a/internal/addrs/move_endpoint_module.go +++ b/internal/addrs/move_endpoint_module.go @@ -373,7 +373,7 @@ func (e *MoveEndpointInModule) CanChainFrom(other *MoveEndpointInModule) bool { return false } -// NestedWithin returns true if the reciever describes an address that is +// NestedWithin returns true if the receiver describes an address that is // contained within one of the objects that the given other address could // select. func (e *MoveEndpointInModule) NestedWithin(other *MoveEndpointInModule) bool { @@ -704,3 +704,53 @@ func (r AbsResourceInstance) MoveDestination(fromMatch, toMatch *MoveEndpointInM panic("unexpected object kind") } } + +// IsModuleMoveReIndex takes the from and to endpoints from a move statement, +// and returns true if the only changes are to module indexes, and all +// non-absolute paths remain the same. +func IsModuleMoveReIndex(from, to *MoveEndpointInModule) bool { + // The statements must originate from the same module. 
+ if !from.module.Equal(to.module) { + panic("cannot compare move expressions from different modules") + } + + switch f := from.relSubject.(type) { + case AbsModuleCall: + switch t := to.relSubject.(type) { + case ModuleInstance: + if len(t) != 1 { + // An AbsModuleCall only ever has one segment, so the + // ModuleInstance length must match. + return false + } + + return f.Call.Name == t[0].Name + } + + case ModuleInstance: + switch t := to.relSubject.(type) { + case AbsModuleCall: + if len(f) != 1 { + return false + } + + return f[0].Name == t.Call.Name + + case ModuleInstance: + // We must have the same number of segments, and the names must all + // match in order for this to solely be an index change operation. + if len(f) != len(t) { + return false + } + + for i := range f { + if f[i].Name != t[i].Name { + return false + } + } + return true + } + } + + return false +} diff --git a/internal/addrs/move_endpoint_module_test.go b/internal/addrs/move_endpoint_module_test.go index bda37ca53..1e2758239 100644 --- a/internal/addrs/move_endpoint_module_test.go +++ b/internal/addrs/move_endpoint_module_test.go @@ -1584,6 +1584,139 @@ func TestSelectsResource(t *testing.T) { } } +func TestIsModuleMoveReIndex(t *testing.T) { + tests := []struct { + from, to AbsMoveable + expect bool + }{ + { + from: mustParseModuleInstanceStr(`module.bar`), + to: mustParseModuleInstanceStr(`module.bar`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar`), + to: mustParseModuleInstanceStr(`module.bar[0]`), + expect: true, + }, + { + from: AbsModuleCall{ + Call: ModuleCall{Name: "bar"}, + }, + to: mustParseModuleInstanceStr(`module.bar[0]`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar["a"]`), + to: AbsModuleCall{ + Call: ModuleCall{Name: "bar"}, + }, + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.foo`), + to: mustParseModuleInstanceStr(`module.bar`), + expect: false, + }, + { + from: 
mustParseModuleInstanceStr(`module.bar`), + to: mustParseModuleInstanceStr(`module.foo[0]`), + expect: false, + }, + { + from: AbsModuleCall{ + Call: ModuleCall{Name: "bar"}, + }, + to: mustParseModuleInstanceStr(`module.foo[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar["a"]`), + to: AbsModuleCall{ + Call: ModuleCall{Name: "foo"}, + }, + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.baz.module.baz`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.baz.module.baz[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.bar[0].module.baz`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar[0].module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar[0].module.baz`), + to: mustParseModuleInstanceStr(`module.bar[1].module.baz[0]`), + expect: true, + }, + { + from: AbsModuleCall{ + Call: ModuleCall{Name: "baz"}, + }, + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + to: AbsModuleCall{ + Call: ModuleCall{Name: "baz"}, + }, + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + to: mustParseModuleInstanceStr(`module.baz`), + expect: 
false, + }, + } + + for i, test := range tests { + t.Run(fmt.Sprintf("[%02d]IsModuleMoveReIndex(%s, %s)", i, test.from, test.to), + func(t *testing.T) { + from := &MoveEndpointInModule{ + relSubject: test.from, + } + + to := &MoveEndpointInModule{ + relSubject: test.to, + } + + if got := IsModuleMoveReIndex(from, to); got != test.expect { + t.Errorf("expected %t, got %t", test.expect, got) + } + }, + ) + } +} + func mustParseAbsResourceInstanceStr(s string) AbsResourceInstance { r, diags := ParseAbsResourceInstanceStr(s) if diags.HasErrors() { From e761117562ec0cbe15ac552089ee93112860898f Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 21 Dec 2021 15:40:56 -0500 Subject: [PATCH 09/68] find implied moves in nested modules Implied moves in nested modules were being skipped --- internal/refactoring/move_statement.go | 2 +- internal/refactoring/move_statement_test.go | 47 +++++++++++++++++++ .../child/move-statement-implied.tf | 16 +++++++ .../move-statement-implied.tf | 4 ++ 4 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf diff --git a/internal/refactoring/move_statement.go b/internal/refactoring/move_statement.go index a363602c3..08fffeb6f 100644 --- a/internal/refactoring/move_statement.go +++ b/internal/refactoring/move_statement.go @@ -149,7 +149,7 @@ func impliedMoveStatements(cfg *configs.Config, prevRunState *states.State, expl } for _, childCfg := range cfg.Children { - into = findMoveStatements(childCfg, into) + into = impliedMoveStatements(childCfg, prevRunState, explicitStmts, into) } return into diff --git a/internal/refactoring/move_statement_test.go b/internal/refactoring/move_statement_test.go index c6f7c2d79..249d7df7e 100644 --- a/internal/refactoring/move_statement_test.go +++ b/internal/refactoring/move_statement_test.go @@ -18,6 +18,15 @@ func TestImpliedMoveStatements(t *testing.T) { Name: name, }.Absolute(addrs.RootModuleInstance) } + + 
nestedResourceAddr := func(mod, name string) addrs.AbsResource { + return addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "foo", + Name: name, + }.Absolute(addrs.RootModuleInstance.Child(mod, addrs.NoKey)) + } + instObjState := func() *states.ResourceInstanceObjectSrc { return &states.ResourceInstanceObjectSrc{} } @@ -86,6 +95,19 @@ func TestImpliedMoveStatements(t *testing.T) { instObjState(), providerAddr, ) + + // Add two resource nested in a module to ensure we find these + // recursively. + s.SetResourceInstanceCurrent( + nestedResourceAddr("child", "formerly_count").Instance(addrs.IntKey(0)), + instObjState(), + providerAddr, + ) + s.SetResourceInstanceCurrent( + nestedResourceAddr("child", "now_count").Instance(addrs.NoKey), + instObjState(), + providerAddr, + ) }) explicitStmts := FindMoveStatements(rootCfg) @@ -101,6 +123,19 @@ func TestImpliedMoveStatements(t *testing.T) { End: tfdiags.SourcePos{Line: 5, Column: 32, Byte: 211}, }, }, + + // Found implied moves in a nested module, ignoring the explicit moves + { + From: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "formerly_count").Instance(addrs.IntKey(0)), tfdiags.SourceRange{}), + To: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "formerly_count").Instance(addrs.NoKey), tfdiags.SourceRange{}), + Implied: true, + DeclRange: tfdiags.SourceRange{ + Filename: "testdata/move-statement-implied/child/move-statement-implied.tf", + Start: tfdiags.SourcePos{Line: 5, Column: 1, Byte: 180}, + End: tfdiags.SourcePos{Line: 5, Column: 32, Byte: 211}, + }, + }, + { From: addrs.ImpliedMoveStatementEndpoint(resourceAddr("now_count").Instance(addrs.NoKey), tfdiags.SourceRange{}), To: addrs.ImpliedMoveStatementEndpoint(resourceAddr("now_count").Instance(addrs.IntKey(0)), tfdiags.SourceRange{}), @@ -112,6 +147,18 @@ func TestImpliedMoveStatements(t *testing.T) { }, }, + // Found implied moves in a nested module, ignoring the explicit moves + { + From: 
addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "now_count").Instance(addrs.NoKey), tfdiags.SourceRange{}), + To: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "now_count").Instance(addrs.IntKey(0)), tfdiags.SourceRange{}), + Implied: true, + DeclRange: tfdiags.SourceRange{ + Filename: "testdata/move-statement-implied/child/move-statement-implied.tf", + Start: tfdiags.SourcePos{Line: 10, Column: 11, Byte: 282}, + End: tfdiags.SourcePos{Line: 10, Column: 12, Byte: 283}, + }, + }, + // We generate foo.ambiguous[0] to foo.ambiguous here, even though // there's already a foo.ambiguous in the state, because it's the // responsibility of the later ApplyMoves step to deal with the diff --git a/internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf b/internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf new file mode 100644 index 000000000..87d09c827 --- /dev/null +++ b/internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf @@ -0,0 +1,16 @@ +# This fixture is useful only in conjunction with a previous run state that +# conforms to the statements encoded in the resource names. It's for +# TestImpliedMoveStatements only. + +resource "foo" "formerly_count" { + # but not count anymore +} + +resource "foo" "now_count" { + count = 1 +} + +moved { + from = foo.no_longer_present[1] + to = foo.no_longer_present +} diff --git a/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf b/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf index 498ead305..4ea628ea6 100644 --- a/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf +++ b/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf @@ -48,3 +48,7 @@ resource "foo" "ambiguous" { # set it up to have both no-key and zero-key instances in the # state. 
} + +module "child" { + source = "./child" +} From 31ed2d987ccadd4eb2b21eb033e494d4663263c7 Mon Sep 17 00:00:00 2001 From: Dylan Staley <88163+dstaley@users.noreply.github.com> Date: Tue, 21 Dec 2021 14:52:51 -0800 Subject: [PATCH 10/68] store website nav files --- website/data/cli-nav-data.json | 498 +++++++++ website/data/configuration-nav-data.json | 8 + website/data/guides-nav-data.json | 12 + website/data/internals-nav-data.json | 56 + website/data/intro-nav-data.json | 23 + website/data/language-nav-data.json | 1220 ++++++++++++++++++++++ 6 files changed, 1817 insertions(+) create mode 100644 website/data/cli-nav-data.json create mode 100644 website/data/configuration-nav-data.json create mode 100644 website/data/guides-nav-data.json create mode 100644 website/data/internals-nav-data.json create mode 100644 website/data/intro-nav-data.json create mode 100644 website/data/language-nav-data.json diff --git a/website/data/cli-nav-data.json b/website/data/cli-nav-data.json new file mode 100644 index 000000000..391816d6d --- /dev/null +++ b/website/data/cli-nav-data.json @@ -0,0 +1,498 @@ +[ + { "heading": "Terraform CLI" }, + { "title": "Overview", "path": "" }, + { "title": "Basic CLI Features", "href": "/cli/commands" }, + { + "title": "Initializing Working Directories", + "routes": [ + { "title": "Overview", "path": "init" }, + { "title": "init", "href": "/cli/commands/init" }, + { "title": "get", "href": "/cli/commands/get" } + ] + }, + { + "title": "Provisioning Infrastructure", + "routes": [ + { "title": "Overview", "path": "run" }, + { "title": "plan", "href": "/cli/commands/plan" }, + { "title": "apply", "href": "/cli/commands/apply" }, + { "title": "destroy", "href": "/cli/commands/destroy" } + ] + }, + { + "title": "Authenticating", + "routes": [ + { "title": "Overview", "path": "auth" }, + { "title": "login", "href": "/cli/commands/login" }, + { "title": "logout", "href": "/cli/commands/logout" } + ] + }, + { + "title": "Writing and Modifying Code", + 
"routes": [ + { "title": "Overview", "path": "code" }, + { "title": "console", "href": "/cli/commands/console" }, + { "title": "fmt", "href": "/cli/commands/fmt" }, + { "title": "validate", "href": "/cli/commands/validate" }, + { + "title": "0.13upgrade", + "href": "/cli/commands/0.13upgrade" + }, + { + "title": "0.12upgrade", + "href": "/cli/commands/0.12upgrade" + } + ] + }, + { + "title": "Inspecting Infrastructure", + "routes": [ + { "title": "Overview", "path": "inspect" }, + { "title": "graph", "href": "/cli/commands/graph" }, + { "title": "output", "href": "/cli/commands/output" }, + { "title": "show", "href": "/cli/commands/show" }, + { + "title": "state list", + "href": "/cli/commands/state/list" + }, + { + "title": "state show", + "href": "/cli/commands/state/show" + } + ] + }, + { + "title": "Importing Infrastructure", + "routes": [ + { "title": "Overview", "path": "import" }, + { + "title": "import", + "href": "/cli/commands/import" + }, + { "title": "Usage Tips", "path": "import/usage" }, + { + "title": "Resource Importability", + "path": "import/importability" + } + ] + }, + { + "title": "Manipulating State", + "routes": [ + { "title": "Overview", "path": "state" }, + { + "title": "Resource Addressing", + "path": "state/resource-addressing" + }, + { "title": "state", "href": "/cli/commands/state" }, + { + "title": "Inspecting State", + "routes": [ + { "title": "Overview", "path": "state/inspect" }, + { + "title": "state list", + "href": "/cli/commands/state/list" + }, + { + "title": "state show", + "href": "/cli/commands/state/show" + }, + { + "title": "refresh", + "href": "/cli/commands/refresh" + } + ] + }, + { + "title": "Forcing Re-creation (Tainting)", + "routes": [ + { "title": "Overview", "path": "state/taint" }, + { + "title": "taint", + "href": "/cli/commands/taint" + }, + { + "title": "untaint", + "href": "/cli/commands/untaint" + } + ] + }, + { + "title": "Moving Resources", + "routes": [ + { "title": "Overview", "path": "state/move" }, + { 
+ "title": "state mv", + "href": "/cli/commands/state/mv" + }, + { + "title": "state rm", + "href": "/cli/commands/state/rm" + }, + { + "title": "state replace-provider", + "href": "/cli/commands/state/replace-provider" + } + ] + }, + { + "title": "Disaster Recovery", + "routes": [ + { + "title": "Overview", + "path": "state/recover" + }, + { + "title": "state pull", + "href": "/cli/commands/state/pull" + }, + { + "title": "state push", + "href": "/cli/commands/state/push" + }, + { + "title": "force-unlock", + "href": "/cli/commands/force-unlock" + } + ] + } + ] + }, + { + "title": "Managing Workspaces", + "routes": [ + { "title": "Overview", "path": "workspaces" }, + { + "title": "workspace", + "routes": [ + { "title": "Overview", "href": "/cli/commands/workspace" }, + { + "title": "workspace list", + "href": "/cli/commands/workspace/list" + }, + { + "title": "workspace select", + "href": "/cli/commands/workspace/select" + }, + { + "title": "workspace new", + "href": "/cli/commands/workspace/new" + }, + { + "title": "workspace delete", + "href": "/cli/commands/workspace/delete" + }, + { + "title": "workspace show", + "href": "/cli/commands/workspace/show" + } + ] + } + ] + }, + { + "title": "Managing Plugins", + "routes": [ + { "title": "Overview", "path": "plugins" }, + { "title": "Plugin Signing", "path": "plugins/signing" }, + { + "title": "providers", + "href": "/cli/commands/providers" + }, + { + "title": "version", + "href": "/cli/commands/version" + }, + { + "title": "providers lock", + "href": "/cli/commands/providers/lock" + }, + { + "title": "providers mirror", + "href": "/cli/commands/providers/mirror" + }, + { + "title": "providers schema", + "href": "/cli/commands/providers/schema" + } + ] + }, + { + "title": "CLI Configuration", + "routes": [ + { "title": "Overview", "path": "config" }, + { "title": "CLI Configuration", "path": "config/config-file" }, + { + "title": "Environment Variables", + "path": "config/environment-variables" + } + ] + }, + { + 
"title": "Using Terraform Cloud", + "routes": [ + { "title": "Overview", "path": "cloud" }, + { "title": "Terraform Cloud Settings", "path": "cloud/settings" }, + { + "title": "Initializing and Migrating", + "path": "cloud/migrating" + }, + { + "title": "Command Line Arguments", + "path": "cloud/command-line-arguments" + } + ] + }, + { + "title": "Automating Terraform", + "routes": [ + { + "title": "Running Terraform in Automation", + "href": "https://learn.hashicorp.com/tutorials/terraform/automate-terraform?in=terraform/automation&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS" + }, + { + "title": "GitHub Actions", + "href": "https://learn.hashicorp.com/tutorials/terraform/github-actions?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS" + } + ] + }, + { + "title": "Alphabetical List of Commands", + "routes": [ + { "title": "Overview", "href": "/cli/commands" }, + { "title": "apply", "href": "/cli/commands/apply" }, + { "title": "console", "href": "/cli/commands/console" }, + { "title": "destroy", "href": "/cli/commands/destroy" }, + { "title": "env", "href": "/cli/commands/env" }, + { "title": "fmt", "href": "/cli/commands/fmt" }, + { + "title": "force-unlock", + "href": "/cli/commands/force-unlock" + }, + { "title": "get", "href": "/cli/commands/get" }, + { "title": "graph", "href": "/cli/commands/graph" }, + { "title": "import", "href": "/cli/commands/import" }, + { "title": "init", "href": "/cli/commands/init" }, + { "title": "login", "href": "/cli/commands/login" }, + { "title": "logout", "href": "/cli/commands/logout" }, + { "title": "output", "href": "/cli/commands/output" }, + { "title": "plan", "href": "/cli/commands/plan" }, + { "title": "providers", "href": "/cli/commands/providers" }, + { + "title": "providers lock", + "href": "/cli/commands/providers/lock" + }, + { + "title": "providers mirror", + "href": "/cli/commands/providers/mirror" + }, + { + "title": "providers schema", + "href": 
"/cli/commands/providers/schema" + }, + { + "title": "push (deprecated)", + "href": "/cli/commands/push" + }, + { "title": "refresh", "href": "/cli/commands/refresh" }, + { "title": "show", "href": "/cli/commands/show" }, + { "title": "state", "href": "/cli/commands/state" }, + { + "title": "state list", + "href": "/cli/commands/state/list" + }, + { "title": "state mv", "href": "/cli/commands/state/mv" }, + { + "title": "state pull", + "href": "/cli/commands/state/pull" + }, + { + "title": "state push", + "href": "/cli/commands/state/push" + }, + { + "title": "state replace-provider", + "href": "/cli/commands/state/replace-provider" + }, + { "title": "state rm", "href": "/cli/commands/state/rm" }, + { + "title": "state show", + "href": "/cli/commands/state/show" + }, + { "title": "taint", "href": "/cli/commands/taint" }, + { + "title": "test (deprecated)", + "href": "/cli/commands/test" + }, + { "title": "untaint", "href": "/cli/commands/untaint" }, + { "title": "validate", "href": "/cli/commands/validate" }, + { "title": "version", "href": "/cli/commands/version" }, + { "title": "workspace", "href": "/cli/commands/workspace" }, + { + "title": "workspace list", + "href": "/cli/commands/workspace/list" + }, + { + "title": "workspace select", + "href": "/cli/commands/workspace/select" + }, + { + "title": "workspace new", + "href": "/cli/commands/workspace/new" + }, + { + "title": "workspace delete", + "href": "/cli/commands/workspace/delete" + }, + { + "title": "workspace show", + "href": "/cli/commands/workspace/show" + }, + { + "title": "0.12upgrade", + "href": "/cli/commands/0.12upgrade" + }, + { + "title": "0.13upgrade", + "href": "/cli/commands/0.13upgrade" + } + ] + }, + { + "title": "Alphabetical list of commands", + "hidden": true, + "routes": [ + { "title": "Overview", "path": "commands" }, + { "title": "apply", "path": "commands/apply" }, + { "title": "console", "path": "commands/console" }, + { "title": "destroy", "path": "commands/destroy" }, + { "title": 
"env", "path": "commands/env" }, + { "title": "fmt", "path": "commands/fmt" }, + { "title": "force-unlock", "path": "commands/force-unlock" }, + { "title": "get", "path": "commands/get" }, + { "title": "graph", "path": "commands/graph" }, + { "title": "import", "path": "commands/import" }, + { "title": "init", "path": "commands/init" }, + { "title": "login", "path": "commands/login" }, + { "title": "logout", "path": "commands/logout" }, + { "title": "output", "path": "commands/output" }, + { "title": "plan", "path": "commands/plan" }, + { + "title": "providers", + "routes": [ + { "title": "providers", "path": "commands/providers" }, + { "title": "providers lock", "path": "commands/providers/lock" }, + { "title": "providers mirror", "path": "commands/providers/mirror" }, + { "title": "providers schema", "path": "commands/providers/schema" } + ] + }, + { "title": "push (deprecated)", "path": "commands/push" }, + { "title": "refresh", "path": "commands/refresh" }, + { "title": "show", "path": "commands/show" }, + { + "title": "state", + "routes": [ + { "title": "state", "path": "commands/state" }, + { "title": "state list", "path": "commands/state/list" }, + { "title": "state mv", "path": "commands/state/mv" }, + { "title": "state pull", "path": "commands/state/pull" }, + { "title": "state push", "path": "commands/state/push" }, + { + "title": "state replace-provider", + "path": "commands/state/replace-provider" + }, + { "title": "state rm", "path": "commands/state/rm" }, + { "title": "state show", "path": "commands/state/show" } + ] + }, + { "title": "taint", "path": "commands/taint" }, + { "title": "test (deprecated)", "path": "commands/test", "hidden": true }, + { "title": "untaint", "path": "commands/untaint" }, + { "title": "validate", "path": "commands/validate" }, + { "title": "version", "path": "commands/version" }, + { + "title": "workspace", + "routes": [ + { + "title": "workspace", + "path": "commands/workspace" + }, + { "title": "workspace list", "path": 
"commands/workspace/list" }, + { "title": "workspace select", "path": "commands/workspace/select" }, + { "title": "workspace new", "path": "commands/workspace/new" }, + { "title": "workspace delete", "path": "commands/workspace/delete" }, + { "title": "workspace show", "path": "commands/workspace/show" } + ] + }, + { "title": "0.12upgrade", "path": "commands/0.12upgrade" }, + { "title": "0.13upgrade", "path": "commands/0.13upgrade" } + ] + }, + { + "title": "Internals", + "routes": [ + { + "title": "Overview", + "href": "/internals" + }, + { + "title": "Credentials Helpers", + "href": "/internals/credentials-helpers" + }, + { + "title": "Debugging Terraform", + "href": "/internals/debugging" + }, + { + "title": "Module Registry Protocol", + "href": "/internals/module-registry-protocol" + }, + { + "title": "Provider Network Mirror Protocol", + "href": "/internals/provider-network-mirror-protocol" + }, + { + "title": "Provider Registry Protocol", + "href": "/internals/provider-registry-protocol" + }, + { + "title": "Resource Graph", + "href": "/internals/graph" + }, + { + "title": "Resource Lifecycle", + "href": "/internals/lifecycle" + }, + { + "title": "Login Protocol", + "href": "/internals/login-protocol" + }, + { + "title": "JSON Output Format", + "href": "/internals/json-format" + }, + { + "title": "Remote Service Discovery", + "href": "/internals/remote-service-discovery" + }, + { + "title": "Provider Metadata", + "href": "/internals/provider-meta" + } + ] + }, + { + "title": "Installation", + "hidden": true, + "routes": [ + { + "title": "APT Packages for Debian and Ubuntu", + "path": "install/apt" + }, + { + "title": "Yum Packages for Red Hat Enterprise Linux, Fedora, and Amazon Linux", + "path": "install/yum" + } + ] + } +] diff --git a/website/data/configuration-nav-data.json b/website/data/configuration-nav-data.json new file mode 100644 index 000000000..b264d9bab --- /dev/null +++ b/website/data/configuration-nav-data.json @@ -0,0 +1,8 @@ +[ + { + 
"title": "Expressions Landing Page", + "path": "expressions" + }, + { "title": "Modules Landing Page", "path": "modules" }, + { "title": "Resources Landing Page", "path": "resources" } +] diff --git a/website/data/guides-nav-data.json b/website/data/guides-nav-data.json new file mode 100644 index 000000000..05d1727fb --- /dev/null +++ b/website/data/guides-nav-data.json @@ -0,0 +1,12 @@ +[ + { "title": "The Core Terraform Workflow", "href": "/intro/core-workflow" }, + { + "title": "Terraform Integration Program", + "href": "/docs/partnerships" + }, + { + "title": "Terraform Provider Development Program", + "path": "terraform-provider-development-program", + "hidden": true + } +] diff --git a/website/data/internals-nav-data.json b/website/data/internals-nav-data.json new file mode 100644 index 000000000..f3a38292a --- /dev/null +++ b/website/data/internals-nav-data.json @@ -0,0 +1,56 @@ +[ + { + "title": "Credentials Helpers", + "path": "credentials-helpers" + }, + { + "title": "Debugging Terraform", + "path": "debugging" + }, + { + "title": "Module Registry Protocol", + "path": "module-registry-protocol" + }, + { + "title": "Provider Network Mirror Protocol", + "path": "provider-network-mirror-protocol" + }, + { + "title": "Provider Registry Protocol", + "path": "provider-registry-protocol" + }, + { + "title": "Resource Graph", + "path": "graph" + }, + { + "title": "Resource Lifecycle", + "path": "lifecycle" + }, + { + "title": "Login Protocol", + "path": "login-protocol" + }, + { + "title": "JSON Output Format", + "path": "json-format" + }, + { + "title": "Remote Service Discovery", + "path": "remote-service-discovery" + }, + { + "title": "Provider Metadata", + "path": "provider-meta" + }, + { + "title": "Machine Readable UI", + "path": "machine-readable-ui", + "hidden": true + }, + { + "title": "Archiving", + "path": "archiving", + "hidden": true + } +] diff --git a/website/data/intro-nav-data.json b/website/data/intro-nav-data.json new file mode 100644 index 
000000000..5df078cbd --- /dev/null +++ b/website/data/intro-nav-data.json @@ -0,0 +1,23 @@ +[ + { "heading": "Introduction to Terraform" }, + { "title": "What is Terraform?", "path": "" }, + { "title": "Use Cases", "path": "use-cases" }, + { + "title": "Getting Started", + "href": "https://learn.hashicorp.com/collections/terraform/aws-get-started?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS" + }, + { "title": "The Core Terraform Workflow", "path": "core-workflow" }, + { + "title": "Terraform vs. Other", + "routes": [ + { "title": "Overview", "path": "vs" }, + { "title": "Chef, Puppet, etc.", "path": "vs/chef-puppet" }, + { + "title": "CloudFormation, Heat, etc.", + "path": "vs/cloudformation" + }, + { "title": "Boto, Fog, etc.", "path": "vs/boto" }, + { "title": "Custom Solutions", "path": "vs/custom" } + ] + } +] diff --git a/website/data/language-nav-data.json b/website/data/language-nav-data.json new file mode 100644 index 000000000..24100696c --- /dev/null +++ b/website/data/language-nav-data.json @@ -0,0 +1,1220 @@ +[ + { "heading": "Terraform Language" }, + { "title": "Overview", "path": "" }, + { + "title": "Attributes as Blocks - Configuration Language", + "path": "attr-as-blocks", + "hidden": true + }, + { + "title": "Terraform v1.0 Compatibility Promises", + "path": "v1-compatibility-promises", + "hidden": true + }, + { + "title": "Files and Directories", + "routes": [ + { "title": "Overview", "path": "files" }, + { "title": "Override Files", "path": "files/override" }, + { + "title": "Dependency Lock File", + "path": "files/dependency-lock" + } + ] + }, + { + "title": "Syntax", + "routes": [ + { "title": "Overview", "path": "syntax" }, + { + "title": "Configuration Syntax", + "path": "syntax/configuration" + }, + { + "title": "JSON Configuration Syntax", + "path": "syntax/json" + }, + { "title": "Style Conventions", "path": "syntax/style" } + ] + }, + { + "title": "Resources", + "routes": [ + { "title": "Overview", "path": 
"resources" }, + { "title": "Resource Blocks", "path": "resources/syntax" }, + { + "title": "Resource Behavior", + "path": "resources/behavior" + }, + { + "title": "Meta-Arguments", + "routes": [ + { + "title": "depends_on", + "href": "/language/meta-arguments/depends_on" + }, + { + "title": "count", + "href": "/language/meta-arguments/count" + }, + { + "title": "for_each", + "href": "/language/meta-arguments/for_each" + }, + { + "title": "provider", + "href": "/language/meta-arguments/resource-provider" + }, + { + "title": "lifecycle", + "href": "/language/meta-arguments/lifecycle" + } + ] + }, + { + "title": "Provisioners", + "routes": [ + { + "title": "Overview", + "path": "resources/provisioners" + }, + { + "title": "Declaring Provisioners", + "path": "resources/provisioners/syntax" + }, + { + "title": "Provisioner Connections", + "path": "resources/provisioners/connection" + }, + { + "title": "Provisioners Without a Resource", + "path": "resources/provisioners/null_resource" + }, + { + "title": "file", + "path": "resources/provisioners/file" + }, + { + "title": "local-exec", + "path": "resources/provisioners/local-exec" + }, + { + "title": "remote-exec", + "path": "resources/provisioners/remote-exec" + }, + { "divider": true }, + { + "title": "chef", + "path": "resources/provisioners/chef" + }, + { + "title": "habitat", + "path": "resources/provisioners/habitat" + }, + { + "title": "puppet", + "path": "resources/provisioners/puppet" + }, + { + "title": "salt-masterless", + "path": "resources/provisioners/salt-masterless" + } + ] + } + ] + }, + { "title": "Data Sources", "path": "data-sources" }, + { + "title": "Meta-Arguments", + "hidden": true, + "routes": [ + { + "title": "count", + "path": "meta-arguments/count" + }, + { + "title": "depends_on", + "path": "meta-arguments/depends_on" + }, + { + "title": "for_each", + "path": "meta-arguments/for_each" + }, + { + "title": "lifecycle", + "path": "meta-arguments/lifecycle" + }, + { + "title": "providers", + 
"path": "meta-arguments/module-providers" + }, + { + "title": "provider", + "path": "meta-arguments/resource-provider" + } + ] + }, + + { + "title": "Providers", + "routes": [ + { "title": "Overview", "path": "providers" }, + { + "title": "Provider Configuration", + "path": "providers/configuration" + }, + { + "title": "Provider Requirements", + "path": "providers/requirements" + }, + { + "title": "Dependency Lock File", + "href": "/language/files/dependency-lock" + } + ] + }, + { + "title": "Variables and Outputs", + "routes": [ + { "title": "Overview", "path": "values" }, + { "title": "Input Variables", "path": "values/variables" }, + { "title": "Output Values", "path": "values/outputs" }, + { "title": "Local Values", "path": "values/locals" } + ] + }, + { + "title": "Modules", + "routes": [ + { "title": "Overview", "path": "modules" }, + + { "title": "Module Blocks", "path": "modules/syntax" }, + { "title": "Module Sources", "path": "modules/sources" }, + { + "title": "Meta-Arguments", + "routes": [ + { + "title": "providers", + "href": "/language/meta-arguments/module-providers" + }, + { + "title": "depends_on", + "href": "/language/meta-arguments/depends_on" + }, + { + "title": "count", + "href": "/language/meta-arguments/count" + }, + { + "title": "for_each", + "href": "/language/meta-arguments/for_each" + } + ] + }, + { + "title": "Module Development", + "routes": [ + { "title": "Overview", "path": "modules/develop" }, + { + "title": "Standard Module Structure", + "path": "modules/develop/structure" + }, + { + "title": "Providers Within Modules", + "path": "modules/develop/providers" + }, + { + "title": "Best Practices: Module Composition", + "path": "modules/develop/composition" + }, + { + "title": "Publishing Modules", + "path": "modules/develop/publish" + }, + { + "title": "Refactoring Modules", + "path": "modules/develop/refactoring" + } + ] + }, + { + "title": "Module Testing Experiment", + "path": "modules/testing-experiment", + "hidden": true + } + ] 
+ }, + { + "title": "Expressions", + "routes": [ + { "title": "Overview", "path": "expressions" }, + { "title": "Types and Values", "path": "expressions/types" }, + { + "title": "Strings and Templates", + "path": "expressions/strings" + }, + { + "title": "References to Values", + "path": "expressions/references" + }, + { "title": "Operators", "path": "expressions/operators" }, + { + "title": "Function Calls", + "path": "expressions/function-calls" + }, + { + "title": "Conditional Expressions", + "path": "expressions/conditionals" + }, + { "title": "For Expressions", "path": "expressions/for" }, + { + "title": "Splat Expressions", + "path": "expressions/splat" + }, + + { + "title": "Dynamic Blocks", + "path": "expressions/dynamic-blocks" + }, + { + "title": "Type Constraints", + "path": "expressions/type-constraints" + }, + { + "title": "Version Constraints", + "path": "expressions/version-constraints" + } + ] + }, + { + "title": "Functions", + "routes": [ + { "title": "Overview", "path": "functions" }, + { + "title": "Numeric Functions", + "routes": [ + { "title": "abs", "href": "/language/functions/abs" }, + { "title": "ceil", "href": "/language/functions/ceil" }, + { + "title": "floor", + "href": "/language/functions/floor" + }, + { "title": "log", "href": "/language/functions/log" }, + { "title": "max", "href": "/language/functions/max" }, + { "title": "min", "href": "/language/functions/min" }, + { + "title": "parseint", + "href": "/language/functions/parseint" + }, + { "title": "pow", "href": "/language/functions/pow" }, + { + "title": "signum", + "href": "/language/functions/signum" + } + ] + }, + { + "title": "String Functions", + "routes": [ + { + "title": "chomp", + "href": "/language/functions/chomp" + }, + { + "title": "format", + "href": "/language/functions/format" + }, + { + "title": "formatlist", + "href": "/language/functions/formatlist" + }, + { + "title": "indent", + "href": "/language/functions/indent" + }, + { "title": "join", "href": 
"/language/functions/join" }, + { + "title": "lower", + "href": "/language/functions/lower" + }, + { + "title": "regex", + "href": "/language/functions/regex" + }, + { + "title": "regexall", + "href": "/language/functions/regexall" + }, + { + "title": "replace", + "href": "/language/functions/replace" + }, + { + "title": "split", + "href": "/language/functions/split" + }, + { + "title": "strrev", + "href": "/language/functions/strrev" + }, + { + "title": "substr", + "href": "/language/functions/substr" + }, + { + "title": "title", + "href": "/language/functions/title" + }, + { "title": "trim", "href": "/language/functions/trim" }, + { + "title": "trimprefix", + "href": "/language/functions/trimprefix" + }, + { + "title": "trimsuffix", + "href": "/language/functions/trimsuffix" + }, + { + "title": "trimspace", + "href": "/language/functions/trimspace" + }, + { "title": "upper", "href": "/language/functions/upper" } + ] + }, + { + "title": "Collection Functions", + "routes": [ + { + "title": "alltrue", + "href": "/language/functions/alltrue" + }, + { + "title": "anytrue", + "href": "/language/functions/anytrue" + }, + { + "title": "chunklist", + "href": "/language/functions/chunklist" + }, + { + "title": "coalesce", + "href": "/language/functions/coalesce" + }, + { + "title": "coalescelist", + "href": "/language/functions/coalescelist" + }, + { + "title": "compact", + "href": "/language/functions/compact" + }, + { + "title": "concat", + "href": "/language/functions/concat" + }, + { + "title": "contains", + "href": "/language/functions/contains" + }, + { + "title": "distinct", + "href": "/language/functions/distinct" + }, + { + "title": "element", + "href": "/language/functions/element" + }, + { + "title": "flatten", + "href": "/language/functions/flatten" + }, + { + "title": "index", + "href": "/language/functions/index_function" + }, + { "title": "keys", "href": "/language/functions/keys" }, + { + "title": "length", + "href": "/language/functions/length" + }, + { 
"title": "list", "href": "/language/functions/list" }, + { + "title": "lookup", + "href": "/language/functions/lookup" + }, + { "title": "map", "href": "/language/functions/map" }, + { + "title": "matchkeys", + "href": "/language/functions/matchkeys" + }, + { + "title": "merge", + "href": "/language/functions/merge" + }, + { "title": "one", "href": "/language/functions/one" }, + { + "title": "range", + "href": "/language/functions/range" + }, + { + "title": "reverse", + "href": "/language/functions/reverse" + }, + { + "title": "setintersection", + "href": "/language/functions/setintersection" + }, + { + "title": "setproduct", + "href": "/language/functions/setproduct" + }, + { + "title": "setsubtract", + "href": "/language/functions/setsubtract" + }, + { + "title": "setunion", + "href": "/language/functions/setunion" + }, + { + "title": "slice", + "href": "/language/functions/slice" + }, + { "title": "sort", "href": "/language/functions/sort" }, + { "title": "sum", "href": "/language/functions/sum" }, + { + "title": "transpose", + "href": "/language/functions/transpose" + }, + { + "title": "values", + "href": "/language/functions/values" + }, + { + "title": "zipmap", + "href": "/language/functions/zipmap" + } + ] + }, + { + "title": "Encoding Functions", + "routes": [ + { + "title": "base64decode", + "href": "/language/functions/base64decode" + }, + { + "title": "base64encode", + "href": "/language/functions/base64encode" + }, + { + "title": "base64gzip", + "href": "/language/functions/base64gzip" + }, + { + "title": "csvdecode", + "href": "/language/functions/csvdecode" + }, + { + "title": "jsondecode", + "href": "/language/functions/jsondecode" + }, + { + "title": "jsonencode", + "href": "/language/functions/jsonencode" + }, + { + "title": "textdecodebase64", + "href": "/language/functions/textdecodebase64" + }, + { + "title": "textencodebase64", + "href": "/language/functions/textencodebase64" + }, + { + "title": "urlencode", + "href": 
"/language/functions/urlencode" + }, + { + "title": "yamldecode", + "href": "/language/functions/yamldecode" + }, + { + "title": "yamlencode", + "href": "/language/functions/yamlencode" + } + ] + }, + { + "title": "Filesystem Functions", + "routes": [ + { + "title": "abspath", + "href": "/language/functions/abspath" + }, + { + "title": "dirname", + "href": "/language/functions/dirname" + }, + { + "title": "pathexpand", + "href": "/language/functions/pathexpand" + }, + { + "title": "basename", + "href": "/language/functions/basename" + }, + { "title": "file", "href": "/language/functions/file" }, + { + "title": "fileexists", + "href": "/language/functions/fileexists" + }, + { + "title": "fileset", + "href": "/language/functions/fileset" + }, + { + "title": "filebase64", + "href": "/language/functions/filebase64" + }, + { + "title": "templatefile", + "href": "/language/functions/templatefile" + } + ] + }, + { + "title": "Date and Time Functions", + "routes": [ + { + "title": "formatdate", + "href": "/language/functions/formatdate" + }, + { + "title": "timeadd", + "href": "/language/functions/timeadd" + }, + { + "title": "timestamp", + "href": "/language/functions/timestamp" + } + ] + }, + { + "title": "Hash and Crypto Functions", + "routes": [ + { + "title": "base64sha256", + "href": "/language/functions/base64sha256" + }, + { + "title": "base64sha512", + "href": "/language/functions/base64sha512" + }, + { + "title": "bcrypt", + "href": "/language/functions/bcrypt" + }, + { + "title": "filebase64sha256", + "href": "/language/functions/filebase64sha256" + }, + { + "title": "filebase64sha512", + "href": "/language/functions/filebase64sha512" + }, + { + "title": "filemd5", + "href": "/language/functions/filemd5" + }, + { + "title": "filesha1", + "href": "/language/functions/filesha1" + }, + { + "title": "filesha256", + "href": "/language/functions/filesha256" + }, + { + "title": "filesha512", + "href": "/language/functions/filesha512" + }, + { "title": "md5", "href": 
"/language/functions/md5" }, + { + "title": "rsadecrypt", + "href": "/language/functions/rsadecrypt" + }, + { "title": "sha1", "href": "/language/functions/sha1" }, + { + "title": "sha256", + "href": "/language/functions/sha256" + }, + { + "title": "sha512", + "href": "/language/functions/sha512" + }, + { "title": "uuid", "href": "/language/functions/uuid" }, + { + "title": "uuidv5", + "href": "/language/functions/uuidv5" + } + ] + }, + { + "title": "IP Network Functions", + "routes": [ + { + "title": "cidrhost", + "href": "/language/functions/cidrhost" + }, + { + "title": "cidrnetmask", + "href": "/language/functions/cidrnetmask" + }, + { + "title": "cidrsubnet", + "href": "/language/functions/cidrsubnet" + }, + { + "title": "cidrsubnets", + "href": "/language/functions/cidrsubnets" + } + ] + }, + { + "title": "Type Conversion Functions", + "routes": [ + { "title": "can", "href": "/language/functions/can" }, + { + "title": "defaults", + "href": "/language/functions/defaults" + }, + { + "title": "nonsensitive", + "href": "/language/functions/nonsensitive" + }, + { + "title": "sensitive", + "href": "/language/functions/sensitive" + }, + { + "title": "tobool", + "href": "/language/functions/tobool" + }, + { + "title": "tolist", + "href": "/language/functions/tolist" + }, + { + "title": "tomap", + "href": "/language/functions/tomap" + }, + { + "title": "tonumber", + "href": "/language/functions/tonumber" + }, + { + "title": "toset", + "href": "/language/functions/toset" + }, + { + "title": "tostring", + "href": "/language/functions/tostring" + }, + { "title": "try", "href": "/language/functions/try" }, + { "title": "type", "href": "/language/functions/type" } + ] + }, + { "title": "abs", "path": "functions/abs", "hidden": true }, + { "title": "abspath", "path": "functions/abspath", "hidden": true }, + { "title": "alltrue", "path": "functions/alltrue", "hidden": true }, + { "title": "anytrue", "path": "functions/anytrue", "hidden": true }, + { + "title": 
"base64decode", + "path": "functions/base64decode", + "hidden": true + }, + { + "title": "base64encode", + "path": "functions/base64encode", + "hidden": true + }, + { "title": "base64gzip", "path": "functions/base64gzip", "hidden": true }, + { + "title": "base64sha256", + "path": "functions/base64sha256", + "hidden": true + }, + { + "title": "base64sha512", + "path": "functions/base64sha512", + "hidden": true + }, + { "title": "basename", "path": "functions/basename", "hidden": true }, + { "title": "bcrypt", "path": "functions/bcrypt", "hidden": true }, + { "title": "can", "path": "functions/can", "hidden": true }, + { "title": "ceil", "path": "functions/ceil", "hidden": true }, + { "title": "chomp", "path": "functions/chomp", "hidden": true }, + { "title": "chunklist", "path": "functions/chunklist", "hidden": true }, + { "title": "cidrhost", "path": "functions/cidrhost", "hidden": true }, + { + "title": "cidrnetmask", + "path": "functions/cidrnetmask", + "hidden": true + }, + { "title": "cidrsubnet", "path": "functions/cidrsubnet", "hidden": true }, + { + "title": "cidrsubnets", + "path": "functions/cidrsubnets", + "hidden": true + }, + { "title": "coalesce", "path": "functions/coalesce", "hidden": true }, + { + "title": "coalescelist", + "path": "functions/coalescelist", + "hidden": true + }, + { "title": "compact", "path": "functions/compact", "hidden": true }, + { "title": "concat", "path": "functions/concat", "hidden": true }, + { "title": "contains", "path": "functions/contains", "hidden": true }, + { "title": "csvdecode", "path": "functions/csvdecode", "hidden": true }, + { "title": "defaults", "path": "functions/defaults", "hidden": true }, + { "title": "dirname", "path": "functions/dirname", "hidden": true }, + { "title": "distinct", "path": "functions/distinct", "hidden": true }, + { "title": "element", "path": "functions/element", "hidden": true }, + { "title": "file", "path": "functions/file", "hidden": true }, + { "title": "filebase64", "path": 
"functions/filebase64", "hidden": true }, + { + "title": "filebase64sha256", + "path": "functions/filebase64sha256", + "hidden": true + }, + { + "title": "filebase64sha512", + "path": "functions/filebase64sha512", + "hidden": true + }, + { "title": "fileexists", "path": "functions/fileexists", "hidden": true }, + { "title": "filemd5", "path": "functions/filemd5", "hidden": true }, + { "title": "fileset", "path": "functions/fileset", "hidden": true }, + { "title": "filesha1", "path": "functions/filesha1", "hidden": true }, + { "title": "filesha256", "path": "functions/filesha256", "hidden": true }, + { "title": "filesha512", "path": "functions/filesha512", "hidden": true }, + { "title": "flatten", "path": "functions/flatten", "hidden": true }, + { "title": "floor", "path": "functions/floor", "hidden": true }, + { "title": "format", "path": "functions/format", "hidden": true }, + { "title": "formatdate", "path": "functions/formatdate", "hidden": true }, + { "title": "formatlist", "path": "functions/formatlist", "hidden": true }, + { "title": "indent", "path": "functions/indent", "hidden": true }, + { "title": "index", "path": "functions/index_function", "hidden": true }, + { "title": "join", "path": "functions/join", "hidden": true }, + { "title": "jsondecode", "path": "functions/jsondecode", "hidden": true }, + { "title": "jsonencode", "path": "functions/jsonencode", "hidden": true }, + { "title": "keys", "path": "functions/keys", "hidden": true }, + { "title": "length", "path": "functions/length", "hidden": true }, + { "title": "list", "path": "functions/list", "hidden": true }, + { "title": "log", "path": "functions/log", "hidden": true }, + { "title": "lookup", "path": "functions/lookup", "hidden": true }, + { "title": "lower", "path": "functions/lower", "hidden": true }, + { "title": "map", "path": "functions/map", "hidden": true }, + { "title": "matchkeys", "path": "functions/matchkeys", "hidden": true }, + { "title": "max", "path": "functions/max", "hidden": 
true }, + { "title": "md5", "path": "functions/md5", "hidden": true }, + { "title": "merge", "path": "functions/merge", "hidden": true }, + { "title": "min", "path": "functions/min", "hidden": true }, + { + "title": "nonsensitive", + "path": "functions/nonsensitive", + "hidden": true + }, + { "title": "one", "path": "functions/one", "hidden": true }, + { "title": "parseint", "path": "functions/parseint", "hidden": true }, + { "title": "pathexpand", "path": "functions/pathexpand", "hidden": true }, + { "title": "pow", "path": "functions/pow", "hidden": true }, + { "title": "range", "path": "functions/range", "hidden": true }, + { "title": "regex", "path": "functions/regex", "hidden": true }, + { "title": "regexall", "path": "functions/regexall", "hidden": true }, + { "title": "replace", "path": "functions/replace", "hidden": true }, + { "title": "reverse", "path": "functions/reverse", "hidden": true }, + { "title": "rsadecrypt", "path": "functions/rsadecrypt", "hidden": true }, + { "title": "sensitive", "path": "functions/sensitive", "hidden": true }, + { + "title": "setintersection", + "path": "functions/setintersection", + "hidden": true + }, + { "title": "setproduct", "path": "functions/setproduct", "hidden": true }, + { + "title": "setsubtract", + "path": "functions/setsubtract", + "hidden": true + }, + { "title": "setunion", "path": "functions/setunion", "hidden": true }, + { "title": "sha1", "path": "functions/sha1", "hidden": true }, + { "title": "sha256", "path": "functions/sha256", "hidden": true }, + { "title": "sha512", "path": "functions/sha512", "hidden": true }, + { "title": "signum", "path": "functions/signum", "hidden": true }, + { "title": "slice", "path": "functions/slice", "hidden": true }, + { "title": "sort", "path": "functions/sort", "hidden": true }, + { "title": "split", "path": "functions/split", "hidden": true }, + { "title": "strrev", "path": "functions/strrev", "hidden": true }, + { "title": "substr", "path": "functions/substr", "hidden": 
true }, + { "title": "sum", "path": "functions/sum", "hidden": true }, + { + "title": "templatefile", + "path": "functions/templatefile", + "hidden": true + }, + { + "title": "textdecodebase64", + "path": "functions/textdecodebase64", + "hidden": true + }, + { + "title": "textencodebase64", + "path": "functions/textencodebase64", + "hidden": true + }, + { "title": "timeadd", "path": "functions/timeadd", "hidden": true }, + { "title": "timestamp", "path": "functions/timestamp", "hidden": true }, + { "title": "title", "path": "functions/title", "hidden": true }, + { "title": "tobool", "path": "functions/tobool", "hidden": true }, + { "title": "tolist", "path": "functions/tolist", "hidden": true }, + { "title": "tomap", "path": "functions/tomap", "hidden": true }, + { "title": "tonumber", "path": "functions/tonumber", "hidden": true }, + { "title": "toset", "path": "functions/toset", "hidden": true }, + { "title": "tostring", "path": "functions/tostring", "hidden": true }, + { "title": "transpose", "path": "functions/transpose", "hidden": true }, + { "title": "trim", "path": "functions/trim", "hidden": true }, + { "title": "trimprefix", "path": "functions/trimprefix", "hidden": true }, + { "title": "trimspace", "path": "functions/trimspace", "hidden": true }, + { "title": "trimsuffix", "path": "functions/trimsuffix", "hidden": true }, + { "title": "try", "path": "functions/try", "hidden": true }, + { "title": "type", "path": "functions/type", "hidden": true }, + { "title": "upper", "path": "functions/upper", "hidden": true }, + { "title": "urlencode", "path": "functions/urlencode", "hidden": true }, + { "title": "uuid", "path": "functions/uuid", "hidden": true }, + { "title": "uuidv5", "path": "functions/uuidv5", "hidden": true }, + { "title": "values", "path": "functions/values", "hidden": true }, + { "title": "yamldecode", "path": "functions/yamldecode", "hidden": true }, + { "title": "yamlencode", "path": "functions/yamlencode", "hidden": true }, + { "title": 
"zipmap", "path": "functions/zipmap", "hidden": true } + ] + }, + { + "title": "Terraform Settings", + "routes": [ + { "title": "Overview", "path": "settings" }, + { "title": "Terraform Cloud", "path": "settings/terraform-cloud" }, + { + "title": "Backends", + "routes": [ + { "title": "Overview", "path": "settings/backends" }, + { + "title": "Backend Configuration", + "path": "settings/backends/configuration" + }, + { + "title": "Available Backends", + "routes": [ + { + "title": "local", + "href": "/language/settings/backends/local" + }, + { + "title": "remote", + "href": "/language/settings/backends/remote" + }, + { + "title": "artifactory", + "href": "/language/settings/backends/artifactory" + }, + { + "title": "azurerm", + "href": "/language/settings/backends/azurerm" + }, + { + "title": "consul", + "href": "/language/settings/backends/consul" + }, + { + "title": "cos", + "href": "/language/settings/backends/cos" + }, + { + "title": "etcd", + "href": "/language/settings/backends/etcd" + }, + { + "title": "etcdv3", + "href": "/language/settings/backends/etcdv3" + }, + { + "title": "gcs", + "href": "/language/settings/backends/gcs" + }, + { + "title": "http", + "href": "/language/settings/backends/http" + }, + { + "title": "Kubernetes", + "href": "/language/settings/backends/kubernetes" + }, + { + "title": "manta", + "href": "/language/settings/backends/manta" + }, + { + "title": "oss", + "href": "/language/settings/backends/oss" + }, + { + "title": "pg", + "href": "/language/settings/backends/pg" + }, + { + "title": "s3", + "href": "/language/settings/backends/s3" + }, + { + "title": "swift", + "href": "/language/settings/backends/swift" + } + ] + }, + { + "title": "local", + "hidden": true, + "path": "settings/backends/local" + }, + { + "title": "remote", + "hidden": true, + "path": "settings/backends/remote" + }, + { + "title": "artifactory", + "hidden": true, + "path": "settings/backends/artifactory" + }, + { + "title": "azurerm", + "hidden": true, + "path": 
"settings/backends/azurerm" + }, + { + "title": "consul", + "hidden": true, + "path": "settings/backends/consul" + }, + { + "title": "cos", + "hidden": true, + "path": "settings/backends/cos" + }, + { + "title": "etcd", + "hidden": true, + "path": "settings/backends/etcd" + }, + { + "title": "etcdv3", + "hidden": true, + "path": "settings/backends/etcdv3" + }, + { + "title": "gcs", + "hidden": true, + "path": "settings/backends/gcs" + }, + { + "title": "http", + "hidden": true, + "path": "settings/backends/http" + }, + { + "title": "Kubernetes", + "hidden": true, + "path": "settings/backends/kubernetes" + }, + { + "title": "manta", + "hidden": true, + "path": "settings/backends/manta" + }, + { + "title": "oss", + "hidden": true, + "path": "settings/backends/oss" + }, + { + "title": "pg", + "hidden": true, + "path": "settings/backends/pg" + }, + { + "title": "s3", + "hidden": true, + "path": "settings/backends/s3" + }, + { + "title": "swift", + "hidden": true, + "path": "settings/backends/swift" + } + ] + } + ] + }, + { + "title": "State", + "routes": [ + { "title": "Overview", "path": "state" }, + { "title": "Purpose", "path": "state/purpose" }, + { + "title": "The terraform_remote_state Data Source", + "path": "state/remote-state-data" + }, + { + "title": "Backends: State Storage and Locking", + "path": "state/backends" + }, + { + "title": "Import Existing Resources", + "path": "state/import" + }, + { "title": "Locking", "path": "state/locking" }, + { "title": "Workspaces", "path": "state/workspaces" }, + { "title": "Remote State", "path": "state/remote" }, + { + "title": "Sensitive Data", + "path": "state/sensitive-data" + } + ] + }, + { + "title": "Upgrade Guides", + "routes": [ + { "title": "Overview", "path": "upgrade-guides" }, + { + "title": "Upgrading to Terraform v1.1", + "path": "upgrade-guides/1-1" + }, + { + "title": "Upgrading to Terraform v1.0", + "path": "upgrade-guides/1-0" + }, + { + "title": "v1.0 Compatibility Promises", + "href": 
"/language/v1-compatibility-promises" + }, + { + "title": "Upgrading to Terraform v0.15", + "path": "upgrade-guides/0-15" + }, + { + "title": "Upgrading to Terraform v0.14", + "path": "upgrade-guides/0-14" + }, + { + "title": "Upgrading to Terraform v0.13", + "path": "upgrade-guides/0-13" + }, + { + "title": "Upgrading to Terraform v0.12", + "path": "upgrade-guides/0-12" + }, + { + "title": "Upgrading to Terraform v0.11", + "path": "upgrade-guides/0-11" + }, + { + "title": "Upgrading to Terraform v0.10", + "path": "upgrade-guides/0-10" + }, + { + "title": "Upgrading to Terraform v0.9", + "path": "upgrade-guides/0-9" + }, + { + "title": "Upgrading to Terraform v0.8", + "path": "upgrade-guides/0-8" + }, + { + "title": "Upgrading to Terraform v0.7", + "path": "upgrade-guides/0-7" + } + ] + }, + { + "title": "Historical docs: 0.11 and Older", + "routes": [ + { "title": "Overview", "path": "configuration-0-11" }, + { + "title": "Load Order and Semantics", + "path": "configuration-0-11/load" + }, + { + "title": "Configuration Syntax", + "path": "configuration-0-11/syntax" + }, + { + "title": "Interpolation Syntax", + "path": "configuration-0-11/interpolation" + }, + { + "title": "Overrides", + "path": "configuration-0-11/override" + }, + { + "title": "Resources", + "path": "configuration-0-11/resources" + }, + { + "title": "Data Sources", + "path": "configuration-0-11/data-sources" + }, + { + "title": "Providers", + "href": "/language/providers" + }, + { + "title": "Variables", + "path": "configuration-0-11/variables" + }, + { + "title": "Outputs", + "path": "configuration-0-11/outputs" + }, + { + "title": "Local Values", + "path": "configuration-0-11/locals" + }, + { + "title": "Modules", + "path": "configuration-0-11/modules" + }, + { + "title": "Terraform", + "path": "configuration-0-11/terraform" + }, + { + "title": "Provisioners", + "href": "/language/resources/provisioners" + }, + { + "title": "Providers", + "path": "configuration-0-11/providers", + "hidden": true 
+ }, + { + "title": "Terraform Push (deprecated)", + "path": "configuration-0-11/terraform-enterprise" + }, + { + "title": "Environment Variables", + "path": "configuration-0-11/environment-variables" + } + ] + } +] From e35c25da4459f7794d3f0abffdf558e46e9a214c Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Wed, 22 Dec 2021 11:36:35 -0800 Subject: [PATCH 11/68] website: Try function documentation "provably" vs "probably" typo This paragraph is trying to say that try only works for dynamic errors and not for errors that are _not_ based on dynamic decision-making in expressions. I'm not sure if this typo was always here or if it was mistakenly "corrected" at some point, but either way the word "probably" changes the meaning of this sentence entirely, making it seem like Terraform is hedging the likelihood of a problem rather than checking exactly for one. --- website/docs/language/functions/try.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/language/functions/try.mdx b/website/docs/language/functions/try.mdx index d7c1f0ede..a23d1def5 100644 --- a/website/docs/language/functions/try.mdx +++ b/website/docs/language/functions/try.mdx @@ -94,7 +94,7 @@ fallback ``` The `try` function will _not_ catch errors relating to constructs that are -probably invalid even before dynamic expression evaluation, such as a malformed +provably invalid even before dynamic expression evaluation, such as a malformed reference or a reference to a top-level object that has not been declared: ``` From 75ef61c7833962038d8832a420a366721cfc2a02 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 21 Dec 2021 15:08:48 -0500 Subject: [PATCH 12/68] check for nested module index changes Changing only the index on a nested module will cause all nested moves to create cycles, since their full addresses will match both the From and To addresses. 
When building the dependency graph, check if the parent is only changing the index of the containing module, and prevent the backwards edge for the move. --- internal/addrs/move_endpoint_module.go | 40 +++++----------- internal/addrs/move_endpoint_module_test.go | 21 ++++++++- internal/refactoring/move_execute.go | 30 ++++++++++-- internal/refactoring/move_validate_test.go | 52 +++++++++++++++++++++ 4 files changed, 109 insertions(+), 34 deletions(-) diff --git a/internal/addrs/move_endpoint_module.go b/internal/addrs/move_endpoint_module.go index 7ff17621b..fdc8a5c25 100644 --- a/internal/addrs/move_endpoint_module.go +++ b/internal/addrs/move_endpoint_module.go @@ -705,10 +705,10 @@ func (r AbsResourceInstance) MoveDestination(fromMatch, toMatch *MoveEndpointInM } } -// IsModuleMoveReIndex takes the from and to endpoints from a move statement, -// and returns true if the only changes are to module indexes, and all -// non-absolute paths remain the same. -func IsModuleMoveReIndex(from, to *MoveEndpointInModule) bool { +// IsModuleReIndex takes the From and To endpoints from a single move +// statement, and returns true if the only changes are to module indexes, and +// all non-absolute paths remain the same. +func (from *MoveEndpointInModule) IsModuleReIndex(to *MoveEndpointInModule) bool { // The statements must originate from the same module. if !from.module.Equal(to.module) { panic("cannot compare move expressions from different modules") @@ -718,37 +718,21 @@ func IsModuleMoveReIndex(from, to *MoveEndpointInModule) bool { case AbsModuleCall: switch t := to.relSubject.(type) { case ModuleInstance: - if len(t) != 1 { - // An AbsModuleCall only ever has one segment, so the - // ModuleInstance length must match. - return false - } - - return f.Call.Name == t[0].Name + // Generate a synthetic module to represent the full address of + // the module call. We're not actually comparing indexes, so the + // instance doesn't matter. 
+ callAddr := f.Instance(NoKey).Module() + return callAddr.Equal(t.Module()) } case ModuleInstance: switch t := to.relSubject.(type) { case AbsModuleCall: - if len(f) != 1 { - return false - } - - return f[0].Name == t.Call.Name + callAddr := t.Instance(NoKey).Module() + return callAddr.Equal(f.Module()) case ModuleInstance: - // We must have the same number of segments, and the names must all - // match in order for this to solely be an index change operation. - if len(f) != len(t) { - return false - } - - for i := range f { - if f[i].Name != t[i].Name { - return false - } - } - return true + return t.Module().Equal(f.Module()) } } diff --git a/internal/addrs/move_endpoint_module_test.go b/internal/addrs/move_endpoint_module_test.go index 1e2758239..c1643d44c 100644 --- a/internal/addrs/move_endpoint_module_test.go +++ b/internal/addrs/move_endpoint_module_test.go @@ -1686,6 +1686,25 @@ func TestIsModuleMoveReIndex(t *testing.T) { }, expect: false, }, + + { + from: AbsModuleCall{ + Module: mustParseModuleInstanceStr(`module.bar[0]`), + Call: ModuleCall{Name: "baz"}, + }, + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: true, + }, + + { + from: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + to: AbsModuleCall{ + Module: mustParseModuleInstanceStr(`module.bar[0]`), + Call: ModuleCall{Name: "baz"}, + }, + expect: true, + }, + { from: mustParseModuleInstanceStr(`module.baz`), to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), @@ -1709,7 +1728,7 @@ func TestIsModuleMoveReIndex(t *testing.T) { relSubject: test.to, } - if got := IsModuleMoveReIndex(from, to); got != test.expect { + if got := from.IsModuleReIndex(to); got != test.expect { t.Errorf("expected %t, got %t", test.expect, got) } }, diff --git a/internal/refactoring/move_execute.go b/internal/refactoring/move_execute.go index 7f5fbae23..9d5b25520 100644 --- a/internal/refactoring/move_execute.go +++ b/internal/refactoring/move_execute.go @@ -238,11 +238,31 @@ func 
statementDependsOn(a, b *MoveStatement) bool { // // Since we are only interested in checking if A depends on B, we only need - // to check the 4 possibilities above which result in B being executed - // first. - return a.From.NestedWithin(b.To) || - a.To.NestedWithin(b.To) || - b.From.NestedWithin(a.From) || - b.To.NestedWithin(a.From) + // first. If there's no dependency at all we can return immediately. + if !(a.From.NestedWithin(b.To) || a.To.NestedWithin(b.To) || + b.From.NestedWithin(a.From) || b.To.NestedWithin(a.From)) { + return false + } + + // If a nested move has a dependency, we need to rule out the possibility + // that this is a move inside a module only changing indexes. If an + // ancestor module is only changing the index of a nested module, any + // nested move statements are going to match both the From and To address + // when the base name is not changing, causing a cycle in the order of + // operations. + + // if A is not declared in an ancestor module, then we can't be nested + // within a module index change. + if len(a.To.Module()) >= len(b.To.Module()) { + return true + } + // We only want the nested move statement to depend on the outer module + // move, so we only test this in the reverse direction. + if a.From.IsModuleReIndex(a.To) { + return false + } + + return true } // MoveResults describes the outcome of an ApplyMoves call. diff --git a/internal/refactoring/move_validate_test.go b/internal/refactoring/move_validate_test.go index 53bbbe6c2..60122511f 100644 --- a/internal/refactoring/move_validate_test.go +++ b/internal/refactoring/move_validate_test.go @@ -404,6 +404,58 @@ Each resource can have moved from only one source resource.`, }, WantError: `Resource type mismatch: This statement declares a move from test.nonexist1[0] to other.single, which is a resource instance of a different type.`, }, + "crossing nested statements": { + // overlapping nested moves will result in a cycle. 
+ Statements: []MoveStatement{ + makeTestMoveStmt(t, ``, + `module.nonexist.test.single`, + `module.count[0].test.count[0]`, + ), + makeTestMoveStmt(t, ``, + `module.nonexist`, + `module.count[0]`, + ), + }, + WantError: `Cyclic dependency in move statements: The following chained move statements form a cycle, and so there is no final location to move objects to: + - test:1,1: module.nonexist → module.count[0] + - test:1,1: module.nonexist.test.single → module.count[0].test.count[0] + +A chain of move statements must end with an address that doesn't appear in any other statements, and which typically also refers to an object still declared in the configuration.`, + }, + "fully contained nested statements": { + // we have to avoid a cycle because the nested moves appear in both + // the from and to address of the parent when only the module index + // is changing. + Statements: []MoveStatement{ + makeTestMoveStmt(t, `count`, + `test.count`, + `test.count[0]`, + ), + makeTestMoveStmt(t, ``, + `module.count`, + `module.count[0]`, + ), + }, + }, + "double fully contained nested statements": { + // we have to avoid a cycle because the nested moves appear in both + // the from and to address of the parent when only the module index + // is changing. 
+ Statements: []MoveStatement{ + makeTestMoveStmt(t, `count`, + `module.count`, + `module.count[0]`, + ), + makeTestMoveStmt(t, `count.count`, + `test.count`, + `test.count[0]`, + ), + makeTestMoveStmt(t, ``, + `module.count`, + `module.count[0]`, + ), + }, + }, } for name, test := range tests { From bdf683802d2d3b58f5e8d3fa9c75606598be94c2 Mon Sep 17 00:00:00 2001 From: Masayuki Morita Date: Mon, 27 Dec 2021 14:29:02 +0900 Subject: [PATCH 13/68] docs: Fix typo in docs/plugin-protocol/releasing-new-version.md --- docs/plugin-protocol/releasing-new-version.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/plugin-protocol/releasing-new-version.md b/docs/plugin-protocol/releasing-new-version.md index 2449b5c3d..197a1a5b5 100644 --- a/docs/plugin-protocol/releasing-new-version.md +++ b/docs/plugin-protocol/releasing-new-version.md @@ -41,7 +41,7 @@ Modify the `SupportedPluginProtocols` variable in hashicorp/terraform's Use the provider test framework to test a provider written with the new protocol. This end-to-end test ensures that providers written with the new -protocol work correctly wtih the test framework, especially in communicating +protocol work correctly with the test framework, especially in communicating the protocol version between the test framework and Terraform. ## Test Retrieving and Running a Provider From the Registry From 01be55b5c4b056126cd8f3dc8e5587fb03994851 Mon Sep 17 00:00:00 2001 From: Addison Grant Date: Mon, 27 Dec 2021 10:25:31 -0800 Subject: [PATCH 14/68] Update variables.mdx Remove extraneous word "from" in "...use a sensitive value from as part of an..." 
--- website/docs/language/values/variables.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/language/values/variables.mdx b/website/docs/language/values/variables.mdx index fc8de6e4e..4bfe6e053 100644 --- a/website/docs/language/values/variables.mdx +++ b/website/docs/language/values/variables.mdx @@ -276,7 +276,7 @@ which will cause Terraform to hide it from regular output regardless of how you assign it a value. For more information, see [Sensitive Resource Attributes](/language/expressions/references#sensitive-resource-attributes). -If you use a sensitive value from as part of an +If you use a sensitive value as part of an [output value](/language/values/outputs) then Terraform will require you to also mark the output value itself as sensitive, to confirm that you intended to export it. From 27cc49fe910d61658af4d9542020cd0205afea62 Mon Sep 17 00:00:00 2001 From: Mukesh Kumar Date: Sat, 1 Jan 2022 20:35:47 +0530 Subject: [PATCH 15/68] Update local values definition The definition of local values used "it" many times, making the sentence not very clear. --- website/docs/language/values/locals.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/language/values/locals.mdx b/website/docs/language/values/locals.mdx index aa529f9fe..6706ad91d 100644 --- a/website/docs/language/values/locals.mdx +++ b/website/docs/language/values/locals.mdx @@ -12,8 +12,8 @@ description: >- > tutorial on HashiCorp Learn. A local value assigns a name to an [expression](/language/expressions), -so you can use it multiple times within a module without repeating -it. +so you can use the name multiple times within a module without repeating +the expression. 
If you're familiar with traditional programming languages, it can be useful to compare Terraform modules to function definitions: From 74761b2f8be9b80e3ceef7990c94e27fbba4660b Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Wed, 22 Dec 2021 15:05:07 -0800 Subject: [PATCH 16/68] getmodules: Use go-getter v1.5.10 and return to upstream GitGetter There was an unintended regression in go-getter v1.5.9's GitGetter which caused us to temporarily fork that particular getter into Terraform to expedite a fix. However, upstream v1.5.10 now includes a functionally-equivalent fix and so we can heal that fork by upgrading. We'd also neglected to update the Module Sources docs when upgrading to go-getter v1.5.9 originally and so we were missing documentation about the new "depth" argument to enable shallow cloning, which I've added retroactively here along with documenting its restriction of only supporting named refs. This new go-getter release also introduces a new credentials-passing method for the Google Cloud Storage getter, and so we must incorporate that into the Terraform-level documentation about module sources.
--- go.mod | 4 +- go.sum | 4 +- internal/getmodules/getter.go | 2 +- internal/getmodules/git_getter.go | 416 ----------- internal/getmodules/git_getter_test.go | 827 ---------------------- website/docs/language/modules/sources.mdx | 38 +- 6 files changed, 37 insertions(+), 1254 deletions(-) delete mode 100644 internal/getmodules/git_getter.go delete mode 100644 internal/getmodules/git_getter_test.go diff --git a/go.mod b/go.mod index d68dfd7cb..194b60ca7 100644 --- a/go.mod +++ b/go.mod @@ -36,12 +36,11 @@ require ( github.com/hashicorp/go-azure-helpers v0.18.0 github.com/hashicorp/go-checkpoint v0.5.0 github.com/hashicorp/go-cleanhttp v0.5.2 - github.com/hashicorp/go-getter v1.5.9 + github.com/hashicorp/go-getter v1.5.10 github.com/hashicorp/go-hclog v0.15.0 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-plugin v1.4.3 github.com/hashicorp/go-retryablehttp v0.7.0 - github.com/hashicorp/go-safetemp v1.0.0 github.com/hashicorp/go-tfe v0.21.0 github.com/hashicorp/go-uuid v1.0.2 github.com/hashicorp/go-version v1.3.0 @@ -145,6 +144,7 @@ require ( github.com/hashicorp/go-immutable-radix v1.0.0 // indirect github.com/hashicorp/go-msgpack v0.5.4 // indirect github.com/hashicorp/go-rootcerts v1.0.2 // indirect + github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/go-slug v0.7.0 // indirect github.com/hashicorp/golang-lru v0.5.1 // indirect github.com/hashicorp/jsonapi v0.0.0-20210826224640-ee7dae0fb22d // indirect diff --git a/go.sum b/go.sum index e7d439d84..8ef3722b7 100644 --- a/go.sum +++ b/go.sum @@ -375,8 +375,8 @@ github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9n github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= github.com/hashicorp/go-getter v1.5.3/go.mod h1:BrrV/1clo8cCYu6mxvboYg+KutTiFnXjMEgDD8+i7ZI= 
-github.com/hashicorp/go-getter v1.5.9 h1:b7ahZW50iQiUek/at3CvZhPK1/jiV6CtKcsJiR6E4R0= -github.com/hashicorp/go-getter v1.5.9/go.mod h1:BrrV/1clo8cCYu6mxvboYg+KutTiFnXjMEgDD8+i7ZI= +github.com/hashicorp/go-getter v1.5.10 h1:EN9YigTlv5Ola0IuleFzQGuaYPPHHtWusP/5AypWEMs= +github.com/hashicorp/go-getter v1.5.10/go.mod h1:9i48BP6wpWweI/0/+FBjqLrp9S8XtwUGjiu0QkWHEaY= github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= diff --git a/internal/getmodules/getter.go b/internal/getmodules/getter.go index ba4c8d89c..95f334762 100644 --- a/internal/getmodules/getter.go +++ b/internal/getmodules/getter.go @@ -73,7 +73,7 @@ var goGetterDecompressors = map[string]getter.Decompressor{ var goGetterGetters = map[string]getter.Getter{ "file": new(getter.FileGetter), "gcs": new(getter.GCSGetter), - "git": new(gitGetter), + "git": new(getter.GitGetter), "hg": new(getter.HgGetter), "s3": new(getter.S3Getter), "http": getterHTTPGetter, diff --git a/internal/getmodules/git_getter.go b/internal/getmodules/git_getter.go deleted file mode 100644 index 1b811b8fb..000000000 --- a/internal/getmodules/git_getter.go +++ /dev/null @@ -1,416 +0,0 @@ -package getmodules - -import ( - "bytes" - "context" - "encoding/base64" - "fmt" - "io/ioutil" - "net/url" - "os" - "os/exec" - "path/filepath" - "regexp" - "runtime" - "strconv" - "strings" - "syscall" - - getter "github.com/hashicorp/go-getter" - urlhelper "github.com/hashicorp/go-getter/helper/url" - safetemp "github.com/hashicorp/go-safetemp" - version "github.com/hashicorp/go-version" -) - -// getter is our base getter; it regroups -// fields all getters have in common. 
-type getterCommon struct { - client *getter.Client -} - -func (g *getterCommon) SetClient(c *getter.Client) { g.client = c } - -// Context tries to returns the Contex from the getter's -// client. otherwise context.Background() is returned. -func (g *getterCommon) Context() context.Context { - if g == nil || g.client == nil { - return context.Background() - } - return g.client.Ctx -} - -// gitGetter is a temporary fork of getter.GitGetter to allow us to tactically -// fix https://github.com/hashicorp/terraform/issues/30119 only within -// Terraform. -// -// This should be only a brief workaround to help us decouple work on the -// Terraform CLI v1.1.1 release so that we can get it done without having to -// coordinate with every other go-getter caller first. However, this fork -// should be healed promptly after v1.1.1 by upstreaming something like this -// fix into upstream go-getter, so that other go-getter callers can also -// benefit from it. -type gitGetter struct { - getterCommon -} - -var defaultBranchRegexp = regexp.MustCompile(`\s->\sorigin/(.*)`) -var lsRemoteSymRefRegexp = regexp.MustCompile(`ref: refs/heads/([^\s]+).*`) - -func (g *gitGetter) ClientMode(_ *url.URL) (getter.ClientMode, error) { - return getter.ClientModeDir, nil -} - -func (g *gitGetter) Get(dst string, u *url.URL) error { - ctx := g.Context() - if _, err := exec.LookPath("git"); err != nil { - return fmt.Errorf("git must be available and on the PATH") - } - - // The port number must be parseable as an integer. If not, the user - // was probably trying to use a scp-style address, in which case the - // ssh:// prefix must be removed to indicate that. - // - // This is not necessary in versions of Go which have patched - // CVE-2019-14809 (e.g. 
Go 1.12.8+) - if portStr := u.Port(); portStr != "" { - if _, err := strconv.ParseUint(portStr, 10, 16); err != nil { - return fmt.Errorf("invalid port number %q; if using the \"scp-like\" git address scheme where a colon introduces the path instead, remove the ssh:// portion and use just the git:: prefix", portStr) - } - } - - // Extract some query parameters we use - var ref, sshKey string - depth := 0 // 0 means "not set" - q := u.Query() - if len(q) > 0 { - ref = q.Get("ref") - q.Del("ref") - - sshKey = q.Get("sshkey") - q.Del("sshkey") - - if n, err := strconv.Atoi(q.Get("depth")); err == nil { - depth = n - } - q.Del("depth") - - // Copy the URL - var newU url.URL = *u - u = &newU - u.RawQuery = q.Encode() - } - - var sshKeyFile string - if sshKey != "" { - // Check that the git version is sufficiently new. - if err := checkGitVersion("2.3"); err != nil { - return fmt.Errorf("Error using ssh key: %v", err) - } - - // We have an SSH key - decode it. - raw, err := base64.StdEncoding.DecodeString(sshKey) - if err != nil { - return err - } - - // Create a temp file for the key and ensure it is removed. - fh, err := ioutil.TempFile("", "go-getter") - if err != nil { - return err - } - sshKeyFile = fh.Name() - defer os.Remove(sshKeyFile) - - // Set the permissions prior to writing the key material. - if err := os.Chmod(sshKeyFile, 0600); err != nil { - return err - } - - // Write the raw key into the temp file. 
- _, err = fh.Write(raw) - fh.Close() - if err != nil { - return err - } - } - - // Clone or update the repository - _, err := os.Stat(dst) - if err != nil && !os.IsNotExist(err) { - return err - } - if err == nil { - err = g.update(ctx, dst, sshKeyFile, ref, depth) - } else { - err = g.clone(ctx, dst, sshKeyFile, u, ref, depth) - } - if err != nil { - return err - } - - // Next: check out the proper tag/branch if it is specified, and checkout - if ref != "" { - if err := g.checkout(dst, ref); err != nil { - return err - } - } - - // Lastly, download any/all submodules. - return g.fetchSubmodules(ctx, dst, sshKeyFile, depth) -} - -// GetFile for Git doesn't support updating at this time. It will download -// the file every time. -func (g *gitGetter) GetFile(dst string, u *url.URL) error { - td, tdcloser, err := safetemp.Dir("", "getter") - if err != nil { - return err - } - defer tdcloser.Close() - - // Get the filename, and strip the filename from the URL so we can - // just get the repository directly. - filename := filepath.Base(u.Path) - u.Path = filepath.Dir(u.Path) - - // Get the full repository - if err := g.Get(td, u); err != nil { - return err - } - - // Copy the single file - u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename))) - if err != nil { - return err - } - - fg := &getter.FileGetter{Copy: true} - return fg.GetFile(dst, u) -} - -func (g *gitGetter) checkout(dst string, ref string) error { - cmd := exec.Command("git", "checkout", ref) - cmd.Dir = dst - return getRunCommand(cmd) -} - -// gitCommitIDRegex is a pattern intended to match strings that seem -// "likely to be" git commit IDs, rather than named refs. This cannot be -// an exact decision because it's valid to name a branch or tag after a series -// of hexadecimal digits too. 
-// -// We require at least 7 digits here because that's the smallest size git -// itself will typically generate, and so it'll reduce the risk of false -// positives on short branch names that happen to also be "hex words". -var gitCommitIDRegex = regexp.MustCompile("^[0-9a-fA-F]{7,40}$") - -func (g *gitGetter) clone(ctx context.Context, dst, sshKeyFile string, u *url.URL, ref string, depth int) error { - args := []string{"clone"} - - autoBranch := false - if ref == "" { - ref = findRemoteDefaultBranch(u) - autoBranch = true - } - if depth > 0 { - args = append(args, "--depth", strconv.Itoa(depth)) - args = append(args, "--branch", ref) - } - args = append(args, u.String(), dst) - - cmd := exec.CommandContext(ctx, "git", args...) - setupGitEnv(cmd, sshKeyFile) - err := getRunCommand(cmd) - if err != nil { - if depth > 0 && !autoBranch { - // If we're creating a shallow clone then the given ref must be - // a named ref (branch or tag) rather than a commit directly. - // We can't accurately recognize the resulting error here without - // hard-coding assumptions about git's human-readable output, but - // we can at least try a heuristic. - if gitCommitIDRegex.MatchString(ref) { - return fmt.Errorf("%w (note that setting 'depth' requires 'ref' to be a branch or tag name)", err) - } - } - return err - } - - if depth < 1 && !autoBranch { - // If we didn't add --depth and --branch above then we will now be - // on the remote repository's default branch, rather than the selected - // ref, so we'll need to fix that before we return. - return g.checkout(dst, ref) - } - return nil -} - -func (g *gitGetter) update(ctx context.Context, dst, sshKeyFile, ref string, depth int) error { - // Determine if we're a branch. If we're NOT a branch, then we just - // switch to master prior to checking out - cmd := exec.CommandContext(ctx, "git", "show-ref", "-q", "--verify", "refs/heads/"+ref) - cmd.Dir = dst - - if getRunCommand(cmd) != nil { - // Not a branch, switch to default branch. 
This will also catch - // non-existent branches, in which case we want to switch to default - // and then checkout the proper branch later. - ref = findDefaultBranch(dst) - } - - // We have to be on a branch to pull - if err := g.checkout(dst, ref); err != nil { - return err - } - - if depth > 0 { - cmd = exec.Command("git", "pull", "--depth", strconv.Itoa(depth), "--ff-only") - } else { - cmd = exec.Command("git", "pull", "--ff-only") - } - - cmd.Dir = dst - setupGitEnv(cmd, sshKeyFile) - return getRunCommand(cmd) -} - -// fetchSubmodules downloads any configured submodules recursively. -func (g *gitGetter) fetchSubmodules(ctx context.Context, dst, sshKeyFile string, depth int) error { - args := []string{"submodule", "update", "--init", "--recursive"} - if depth > 0 { - args = append(args, "--depth", strconv.Itoa(depth)) - } - cmd := exec.CommandContext(ctx, "git", args...) - cmd.Dir = dst - setupGitEnv(cmd, sshKeyFile) - return getRunCommand(cmd) -} - -// findDefaultBranch checks the repo's origin remote for its default branch -// (generally "master"). "master" is returned if an origin default branch -// can't be determined. -func findDefaultBranch(dst string) string { - var stdoutbuf bytes.Buffer - cmd := exec.Command("git", "branch", "-r", "--points-at", "refs/remotes/origin/HEAD") - cmd.Dir = dst - cmd.Stdout = &stdoutbuf - err := cmd.Run() - matches := defaultBranchRegexp.FindStringSubmatch(stdoutbuf.String()) - if err != nil || matches == nil { - return "master" - } - return matches[len(matches)-1] -} - -// findRemoteDefaultBranch checks the remote repo's HEAD symref to return the remote repo's -// default branch. "master" is returned if no HEAD symref exists. 
-func findRemoteDefaultBranch(u *url.URL) string { - var stdoutbuf bytes.Buffer - cmd := exec.Command("git", "ls-remote", "--symref", u.String(), "HEAD") - cmd.Stdout = &stdoutbuf - err := cmd.Run() - matches := lsRemoteSymRefRegexp.FindStringSubmatch(stdoutbuf.String()) - if err != nil || matches == nil { - return "master" - } - return matches[len(matches)-1] -} - -// setupGitEnv sets up the environment for the given command. This is used to -// pass configuration data to git and ssh and enables advanced cloning methods. -func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) { - const gitSSHCommand = "GIT_SSH_COMMAND=" - var sshCmd []string - - // If we have an existing GIT_SSH_COMMAND, we need to append our options. - // We will also remove our old entry to make sure the behavior is the same - // with versions of Go < 1.9. - env := os.Environ() - for i, v := range env { - if strings.HasPrefix(v, gitSSHCommand) && len(v) > len(gitSSHCommand) { - sshCmd = []string{v} - - env[i], env[len(env)-1] = env[len(env)-1], env[i] - env = env[:len(env)-1] - break - } - } - - if len(sshCmd) == 0 { - sshCmd = []string{gitSSHCommand + "ssh"} - } - - if sshKeyFile != "" { - // We have an SSH key temp file configured, tell ssh about this. - if runtime.GOOS == "windows" { - sshKeyFile = strings.Replace(sshKeyFile, `\`, `/`, -1) - } - sshCmd = append(sshCmd, "-i", sshKeyFile) - } - - env = append(env, strings.Join(sshCmd, " ")) - cmd.Env = env -} - -// checkGitVersion is used to check the version of git installed on the system -// against a known minimum version. Returns an error if the installed version -// is older than the given minimum. 
-func checkGitVersion(min string) error { - want, err := version.NewVersion(min) - if err != nil { - return err - } - - out, err := exec.Command("git", "version").Output() - if err != nil { - return err - } - - fields := strings.Fields(string(out)) - if len(fields) < 3 { - return fmt.Errorf("Unexpected 'git version' output: %q", string(out)) - } - v := fields[2] - if runtime.GOOS == "windows" && strings.Contains(v, ".windows.") { - // on windows, git version will return for example: - // git version 2.20.1.windows.1 - // Which does not follow the semantic versionning specs - // https://semver.org. We remove that part in order for - // go-version to not error. - v = v[:strings.Index(v, ".windows.")] - } - - have, err := version.NewVersion(v) - if err != nil { - return err - } - - if have.LessThan(want) { - return fmt.Errorf("Required git version = %s, have %s", want, have) - } - - return nil -} - -// getRunCommand is a helper that will run a command and capture the output -// in the case an error happens. 
-func getRunCommand(cmd *exec.Cmd) error { - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - err := cmd.Run() - if err == nil { - return nil - } - if exiterr, ok := err.(*exec.ExitError); ok { - // The program has exited with an exit code != 0 - if status, ok := exiterr.Sys().(syscall.WaitStatus); ok { - return fmt.Errorf( - "%s exited with %d: %s", - cmd.Path, - status.ExitStatus(), - buf.String()) - } - } - - return fmt.Errorf("error running %s: %s", cmd.Path, buf.String()) -} diff --git a/internal/getmodules/git_getter_test.go b/internal/getmodules/git_getter_test.go deleted file mode 100644 index 5893c9c8e..000000000 --- a/internal/getmodules/git_getter_test.go +++ /dev/null @@ -1,827 +0,0 @@ -package getmodules - -import ( - "bytes" - "encoding/base64" - "io/ioutil" - "net/url" - "os" - "os/exec" - "path/filepath" - "reflect" - "runtime" - "strings" - "testing" - - getter "github.com/hashicorp/go-getter" - urlhelper "github.com/hashicorp/go-getter/helper/url" -) - -var testHasGit bool - -func init() { - if _, err := exec.LookPath("git"); err == nil { - testHasGit = true - } -} - -func TestGitGetter_impl(t *testing.T) { - var _ getter.Getter = new(gitGetter) -} - -func TestGitGetter(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "basic") - repo.commitFile("foo.txt", "hello") - - // With a dir that doesn't exist - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath := filepath.Join(dst, "foo.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_branch(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "branch") - repo.git("checkout", "-b", "test-branch") - repo.commitFile("branch.txt", "branch") - - q := repo.url.Query() - 
q.Add("ref", "test-branch") - repo.url.RawQuery = q.Encode() - - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath := filepath.Join(dst, "branch.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } - - // Get again should work - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath = filepath.Join(dst, "branch.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_commitID(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - // We're going to create different content on the main branch vs. - // another branch here, so that below we can recognize if we - // correctly cloned the commit actually requested (from the - // "other branch"), not the one at HEAD. - repo := testGitRepo(t, "commit_id") - repo.git("checkout", "-b", "main-branch") - repo.commitFile("wrong.txt", "Nope") - repo.git("checkout", "-b", "other-branch") - repo.commitFile("hello.txt", "Yep") - commitID, err := repo.latestCommit() - if err != nil { - t.Fatal(err) - } - // Return to the main branch so that HEAD of this repository - // will be that, rather than "test-branch". 
- repo.git("checkout", "main-branch") - - q := repo.url.Query() - q.Add("ref", commitID) - repo.url.RawQuery = q.Encode() - - t.Logf("Getting %s", repo.url) - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath := filepath.Join(dst, "hello.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } - - // Get again should work - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath = filepath.Join(dst, "hello.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_remoteWithoutMaster(t *testing.T) { - if !testHasGit { - t.Log("git not found, skipping") - t.Skip() - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "branch") - repo.git("checkout", "-b", "test-branch") - repo.commitFile("branch.txt", "branch") - - q := repo.url.Query() - repo.url.RawQuery = q.Encode() - - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath := filepath.Join(dst, "branch.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } - - // Get again should work - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath = filepath.Join(dst, "branch.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_shallowClone(t *testing.T) { - if !testHasGit { - t.Log("git not found, skipping") - t.Skip() - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "upstream") - repo.commitFile("upstream.txt", "0") - repo.commitFile("upstream.txt", "1") - - // Specifiy a clone depth of 1 - q := repo.url.Query() - q.Add("depth", "1") - repo.url.RawQuery = q.Encode() - - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } 
- - // Assert rev-list count is '1' - cmd := exec.Command("git", "rev-list", "HEAD", "--count") - cmd.Dir = dst - b, err := cmd.Output() - if err != nil { - t.Fatalf("err: %s", err) - } - - out := strings.TrimSpace(string(b)) - if out != "1" { - t.Fatalf("expected rev-list count to be '1' but got %v", out) - } -} - -func TestGitGetter_shallowCloneWithTag(t *testing.T) { - if !testHasGit { - t.Log("git not found, skipping") - t.Skip() - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "upstream") - repo.commitFile("v1.0.txt", "0") - repo.git("tag", "v1.0") - repo.commitFile("v1.1.txt", "1") - - // Specifiy a clone depth of 1 with a tag - q := repo.url.Query() - q.Add("ref", "v1.0") - q.Add("depth", "1") - repo.url.RawQuery = q.Encode() - - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Assert rev-list count is '1' - cmd := exec.Command("git", "rev-list", "HEAD", "--count") - cmd.Dir = dst - b, err := cmd.Output() - if err != nil { - t.Fatalf("err: %s", err) - } - - out := strings.TrimSpace(string(b)) - if out != "1" { - t.Fatalf("expected rev-list count to be '1' but got %v", out) - } - - // Verify the v1.0 file exists - mainPath := filepath.Join(dst, "v1.0.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the v1.1 file does not exists - mainPath = filepath.Join(dst, "v1.1.txt") - if _, err := os.Stat(mainPath); err == nil { - t.Fatalf("expected v1.1 file to not exist") - } -} - -func TestGitGetter_shallowCloneWithCommitID(t *testing.T) { - if !testHasGit { - t.Log("git not found, skipping") - t.Skip() - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "upstream") - repo.commitFile("v1.0.txt", "0") - repo.git("tag", "v1.0") - repo.commitFile("v1.1.txt", "1") - - commitID, err := repo.latestCommit() - if err != nil { - t.Fatal(err) - } - - // Specify a clone depth of 1 with a naked commit ID - // This is intentionally invalid: shallow 
clone always requires a named ref. - q := repo.url.Query() - q.Add("ref", commitID[:8]) - q.Add("depth", "1") - repo.url.RawQuery = q.Encode() - - t.Logf("Getting %s", repo.url) - err = g.Get(dst, repo.url) - if err == nil { - t.Fatalf("success; want error") - } - // We use a heuristic to generate an extra hint in the error message if - // it looks like the user was trying to combine ref=COMMIT with depth. - if got, want := err.Error(), "(note that setting 'depth' requires 'ref' to be a branch or tag name)"; !strings.Contains(got, want) { - t.Errorf("missing error message hint\ngot: %s\nwant substring: %s", got, want) - } -} - -func TestGitGetter_branchUpdate(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - // First setup the state with a fresh branch - repo := testGitRepo(t, "branch-update") - repo.git("checkout", "-b", "test-branch") - repo.commitFile("branch.txt", "branch") - - // Get the "test-branch" branch - q := repo.url.Query() - q.Add("ref", "test-branch") - repo.url.RawQuery = q.Encode() - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath := filepath.Join(dst, "branch.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } - - // Commit an update to the branch - repo.commitFile("branch-update.txt", "branch-update") - - // Get again should work - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath = filepath.Join(dst, "branch-update.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_tag(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - repo := testGitRepo(t, "tag") - repo.commitFile("tag.txt", "tag") - repo.git("tag", "v1.0") - - q := repo.url.Query() - q.Add("ref", "v1.0") - 
repo.url.RawQuery = q.Encode() - - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath := filepath.Join(dst, "tag.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } - - // Get again should work - if err := g.Get(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - mainPath = filepath.Join(dst, "tag.txt") - if _, err := os.Stat(mainPath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_GetFile(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempTestFile(t) - defer os.RemoveAll(filepath.Dir(dst)) - - repo := testGitRepo(t, "file") - repo.commitFile("file.txt", "hello") - - // Download the file - repo.url.Path = filepath.Join(repo.url.Path, "file.txt") - if err := g.GetFile(dst, repo.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Verify the main file exists - if _, err := os.Stat(dst); err != nil { - t.Fatalf("err: %s", err) - } - assertContents(t, dst, "hello") -} - -func TestGitGetter_gitVersion(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - if runtime.GOOS == "windows" { - t.Skip("skipping on windows since the test requires sh") - } - dir, err := ioutil.TempDir("", "go-getter") - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(dir) - - script := filepath.Join(dir, "git") - err = ioutil.WriteFile( - script, - []byte("#!/bin/sh\necho \"git version 2.0 (Some Metadata Here)\n\""), - 0700) - if err != nil { - t.Fatal(err) - } - - defer func(v string) { - os.Setenv("PATH", v) - }(os.Getenv("PATH")) - - os.Setenv("PATH", dir) - - // Asking for a higher version throws an error - if err := checkGitVersion("2.3"); err == nil { - t.Fatal("expect git version error") - } - - // Passes when version is satisfied - if err := checkGitVersion("1.9"); err != nil { - t.Fatal(err) - } -} - -func 
TestGitGetter_sshKey(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) - - // avoid getting locked by a github authenticity validation prompt - os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") - defer os.Setenv("GIT_SSH_COMMAND", "") - - u, err := urlhelper.Parse("ssh://git@github.com/hashicorp/test-private-repo" + - "?sshkey=" + encodedKey) - if err != nil { - t.Fatal(err) - } - - if err := g.Get(dst, u); err != nil { - t.Fatalf("err: %s", err) - } - - readmePath := filepath.Join(dst, "README.md") - if _, err := os.Stat(readmePath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_sshSCPStyle(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) - - // avoid getting locked by a github authenticity validation prompt - os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") - defer os.Setenv("GIT_SSH_COMMAND", "") - - // This test exercises the combination of the git detector and the - // git getter, to make sure that together they make scp-style URLs work. 
- client := &getter.Client{ - Src: "git@github.com:hashicorp/test-private-repo?sshkey=" + encodedKey, - Dst: dst, - Pwd: ".", - - Mode: getter.ClientModeDir, - - Detectors: []getter.Detector{ - new(getter.GitDetector), - }, - Getters: map[string]getter.Getter{ - "git": g, - }, - } - - if err := client.Get(); err != nil { - t.Fatalf("client.Get failed: %s", err) - } - - readmePath := filepath.Join(dst, "README.md") - if _, err := os.Stat(readmePath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_sshExplicitPort(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) - - // avoid getting locked by a github authenticity validation prompt - os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") - defer os.Setenv("GIT_SSH_COMMAND", "") - - // This test exercises the combination of the git detector and the - // git getter, to make sure that together they make scp-style URLs work. 
- client := &getter.Client{ - Src: "git::ssh://git@github.com:22/hashicorp/test-private-repo?sshkey=" + encodedKey, - Dst: dst, - Pwd: ".", - - Mode: getter.ClientModeDir, - - Detectors: []getter.Detector{ - new(getter.GitDetector), - }, - Getters: map[string]getter.Getter{ - "git": g, - }, - } - - if err := client.Get(); err != nil { - t.Fatalf("client.Get failed: %s", err) - } - - readmePath := filepath.Join(dst, "README.md") - if _, err := os.Stat(readmePath); err != nil { - t.Fatalf("err: %s", err) - } -} - -func TestGitGetter_sshSCPStyleInvalidScheme(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) - - // avoid getting locked by a github authenticity validation prompt - os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") - defer os.Setenv("GIT_SSH_COMMAND", "") - - // This test exercises the combination of the git detector and the - // git getter, to make sure that together they make scp-style URLs work. 
- client := &getter.Client{ - Src: "git::ssh://git@github.com:hashicorp/test-private-repo?sshkey=" + encodedKey, - Dst: dst, - Pwd: ".", - - Mode: getter.ClientModeDir, - - Detectors: []getter.Detector{ - new(getter.GitDetector), - }, - Getters: map[string]getter.Getter{ - "git": g, - }, - } - - err := client.Get() - if err == nil { - t.Fatalf("get succeeded; want error") - } - - got := err.Error() - want1, want2 := `invalid source string`, `invalid port number "hashicorp"` - if !(strings.Contains(got, want1) || strings.Contains(got, want2)) { - t.Fatalf("wrong error\ngot: %s\nwant: %q or %q", got, want1, want2) - } -} - -func TestGitGetter_submodule(t *testing.T) { - if !testHasGit { - t.Skip("git not found, skipping") - } - - g := new(gitGetter) - dst := tempDir(t) - - relpath := func(basepath, targpath string) string { - relpath, err := filepath.Rel(basepath, targpath) - if err != nil { - t.Fatal(err) - } - return strings.Replace(relpath, `\`, `/`, -1) - // on windows git still prefers relatives paths - // containing `/` for submodules - } - - // Set up the grandchild - gc := testGitRepo(t, "grandchild") - gc.commitFile("grandchild.txt", "grandchild") - - // Set up the child - c := testGitRepo(t, "child") - c.commitFile("child.txt", "child") - c.git("submodule", "add", "-f", relpath(c.dir, gc.dir)) - c.git("commit", "-m", "Add grandchild submodule") - - // Set up the parent - p := testGitRepo(t, "parent") - p.commitFile("parent.txt", "parent") - p.git("submodule", "add", "-f", relpath(p.dir, c.dir)) - p.git("commit", "-m", "Add child submodule") - - // Clone the root repository - if err := g.Get(dst, p.url); err != nil { - t.Fatalf("err: %s", err) - } - - // Check that the files exist - for _, path := range []string{ - filepath.Join(dst, "parent.txt"), - filepath.Join(dst, "child", "child.txt"), - filepath.Join(dst, "child", "grandchild", "grandchild.txt"), - } { - if _, err := os.Stat(path); err != nil { - t.Fatalf("err: %s", err) - } - } -} - -func 
TestGitGetter_setupGitEnv_sshKey(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skip("skipping on windows since the test requires sh") - } - - cmd := exec.Command("/bin/sh", "-c", "echo $GIT_SSH_COMMAND") - setupGitEnv(cmd, "/tmp/foo.pem") - out, err := cmd.Output() - if err != nil { - t.Fatal(err) - } - - actual := strings.TrimSpace(string(out)) - if actual != "ssh -i /tmp/foo.pem" { - t.Fatalf("unexpected GIT_SSH_COMMAND: %q", actual) - } -} - -func TestGitGetter_setupGitEnvWithExisting_sshKey(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skipf("skipping on windows since the test requires sh") - return - } - - // start with an existing ssh command configuration - os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") - defer os.Setenv("GIT_SSH_COMMAND", "") - - cmd := exec.Command("/bin/sh", "-c", "echo $GIT_SSH_COMMAND") - setupGitEnv(cmd, "/tmp/foo.pem") - out, err := cmd.Output() - if err != nil { - t.Fatal(err) - } - - actual := strings.TrimSpace(string(out)) - if actual != "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes -i /tmp/foo.pem" { - t.Fatalf("unexpected GIT_SSH_COMMAND: %q", actual) - } -} - -// gitRepo is a helper struct which controls a single temp git repo. -type gitRepo struct { - t *testing.T - url *url.URL - dir string -} - -// testGitRepo creates a new test git repository. -func testGitRepo(t *testing.T, name string) *gitRepo { - t.Helper() - dir, err := ioutil.TempDir("", "go-getter") - if err != nil { - t.Fatal(err) - } - dir = filepath.Join(dir, name) - if err := os.Mkdir(dir, 0700); err != nil { - t.Fatal(err) - } - - r := &gitRepo{ - t: t, - dir: dir, - } - - url, err := urlhelper.Parse("file://" + r.dir) - if err != nil { - t.Fatal(err) - } - r.url = url - - t.Logf("initializing git repo in %s", dir) - r.git("init") - r.git("config", "user.name", "go-getter") - r.git("config", "user.email", "go-getter@hashicorp.com") - - return r -} - -// git runs a git command against the repo. 
-func (r *gitRepo) git(args ...string) { - cmd := exec.Command("git", args...) - cmd.Dir = r.dir - bfr := bytes.NewBuffer(nil) - cmd.Stderr = bfr - if err := cmd.Run(); err != nil { - r.t.Fatal(err, bfr.String()) - } -} - -// commitFile writes and commits a text file to the repo. -func (r *gitRepo) commitFile(file, content string) { - path := filepath.Join(r.dir, file) - if err := ioutil.WriteFile(path, []byte(content), 0600); err != nil { - r.t.Fatal(err) - } - r.git("add", file) - r.git("commit", "-m", "Adding "+file) -} - -// latestCommit returns the full commit id of the latest commit on the current -// branch. -func (r *gitRepo) latestCommit() (string, error) { - cmd := exec.Command("git", "rev-parse", "HEAD") - cmd.Dir = r.dir - rawOut, err := cmd.Output() - if err != nil { - return "", err - } - rawOut = bytes.TrimSpace(rawOut) - return string(rawOut), nil -} - -// This is a read-only deploy key for an empty test repository. -// Note: This is split over multiple lines to avoid being disabled by key -// scanners automatically. 
-var testGitToken = `-----BEGIN RSA PRIVATE KEY----- -MIIEpAIBAAKCAQEA9cHsxCl3Jjgu9DHpwvmfFOl1XEdY+ShHDR/cMnzJ5ddk5/oV -Wy6EWatvyHZfRSZMwzv4PtKeUPm6iXjqWp4xdWU9khlPzozyj+U9Fq70TRVUW9E5 -T1XdQVwJE421yffr4VMMwu60wBqjI1epapH2i2inYvw9Zl9X2MXq0+jTvFvDerbT -mDtfStDPljenELAIZtWVETSvbI46gALwbxbM2292ZUIL4D6jRz0aZMmyy/twYv8r -9WGJLwmYzU518Ie7zqKW/mCTdTrV0WRiDj0MeRaPgrGY9amuHE4r9iG/cJkwpKAO -Ccz0Hs6i89u9vZnTqZU9V7weJqRAQcMjXXR6yQIDAQABAoIBAQDBzICKnGxiTlHw -rd+6qqChnAy5jWYDbZjCJ8q8YZ3RS08+g/8NXZxvHftTqM0uOaq1FviHig3gq15H -hHvCpBc6jXDFYoKFzq6FfO/0kFkE5HoWweIgxwRow0xBCDJAJ+ryUEyy+Ay/pQHb -IAjwilRS0V+WdnVw4mTjBAhPvb4jPOo97Yfy3PYUyx2F3newkqXOZy+zx3G/ANoa -ncypfMGyy76sfCWKqw4J1gVkVQLwbB6gQkXUFGYwY9sRrxbG93kQw76Flc/E/s52 -62j4v1IM0fq0t/St+Y/+s6Lkw` + `aqt3ft1nsqWcRaVDdqvMfkzgJGXlw0bGzJG5MEQ -AIBq3dHRAoGBAP8OeG/DKG2Z1VmSfzuz1pas1fbZ+F7venOBrjez3sKlb3Pyl2aH -mt2wjaTUi5v10VrHgYtOEdqyhQeUSYydWXIBKNMag0NLLrfFUKZK+57wrHWFdFjn -VgpsdkLSNTOZpC8gA5OaJ+36IcOPfGqyyP9wuuRoaYnVT1KEzqLa9FEFAoGBAPaq -pglwhil2rxjJE4zq0afQLNpAfi7Xqcrepij+xvJIcIj7nawxXuPxqRFxONE/h3yX -zkybO8wLdbHX9Iw/wc1j50Uf1Z5gHdLf7/hQJoWKpz1RnkWRy6CYON8v1tpVp0tb -OAajR/kZnzebq2mfa7pyy5zDCX++2kp/dcFwHf31AoGAE8oupBVTZLWj7TBFuP8q -LkS40U92Sv9v09iDCQVmylmFvUxcXPM2m+7f/qMTNgWrucxzC7kB/6MMWVszHbrz -vrnCTibnemgx9sZTjKOSxHFOIEw7i85fSa3Cu0qOIDPSnmlwfZpfcMKQrhjLAYhf -uhooFiLX1X78iZ2OXup4PHUCgYEAsmBrm83sp1V1gAYBBlnVbXakyNv0pCk/Vz61 -iFXeRt1NzDGxLxGw3kQnED8BaIh5kQcyn8Fud7sdzJMv/LAqlT4Ww60mzNYTGyjo -H3jOsqm3ESfRvduWFreeAQBWbiOczGjV1i8D4EbAFfWT+tjXjchwKBf+6Yt5zn/o -Bw/uEHUCgYAFs+JPOR25oRyBs7ujrMo/OY1z/eXTVVgZxY+tYGe1FJqDeFyR7ytK -+JBB1MuDwQKGm2wSIXdCzTNoIx2B9zTseiPTwT8G7vqNFhXoIaTBp4P2xIQb45mJ -7GkTsMBHwpSMOXgX9Weq3v5xOJ2WxVtjENmd6qzxcYCO5lP15O17hA== ------END RSA PRIVATE KEY-----` - -func assertContents(t *testing.T, path string, contents string) { - data, err := ioutil.ReadFile(path) - if err != nil { - t.Fatalf("err: %s", err) - } - - if !reflect.DeepEqual(data, []byte(contents)) { - t.Fatalf("bad. 
expected:\n\n%s\n\nGot:\n\n%s", contents, string(data)) - } -} - -func tempDir(t *testing.T) string { - dir, err := ioutil.TempDir("", "tf") - if err != nil { - t.Fatalf("err: %s", err) - } - if err := os.RemoveAll(dir); err != nil { - t.Fatalf("err: %s", err) - } - - return dir -} - -func tempTestFile(t *testing.T) string { - dir := tempDir(t) - return filepath.Join(dir, "foo") -} diff --git a/website/docs/language/modules/sources.mdx b/website/docs/language/modules/sources.mdx index 1af189b94..d4aa0d35b 100644 --- a/website/docs/language/modules/sources.mdx +++ b/website/docs/language/modules/sources.mdx @@ -239,20 +239,44 @@ only SSH key authentication is supported, and By default, Terraform will clone and use the default branch (referenced by `HEAD`) in the selected repository. You can override this using the `ref` argument. The value of the `ref` argument can be any reference that would be accepted -by the `git checkout` command, such as branch, SHA-1 hash (short or full), or tag names. The [Git documentation](https://git-scm.com/book/en/v2/Git-Tools-Revision-Selection#_single_revisions) contains a complete list. +by the `git checkout` command, such as branch, SHA-1 hash (short or full), or tag names. +For a full list of the possible values, see +[Git Tools - Revision Selection](https://git-scm.com/book/en/v2/Git-Tools-Revision-Selection#_single_revisions) +in [the Git Book](https://git-scm.com/book/en/v2). ```hcl -# referencing a specific release +# select a specific tag module "vpc" { source = "git::https://example.com/vpc.git?ref=v1.2.0" } -# referencing a specific commit SHA-1 hash +# directly select a commit using its SHA-1 hash module "storage" { source = "git::https://example.com/storage.git?ref=51d462976d84fdea54b47d80dcabbf680badcdb8" } ``` +### Shallow Clone + +For larger repositories you may prefer to make only a shallow clone in order +to reduce the time taken to retrieve the remote repository. 
+ +The `depth` URL argument corresponds to +[the `--depth` argument to `git clone`](https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---depthltdepthgt), +telling Git to create a shallow clone with the history truncated to only +the specified number of commits. + +However, because shallow clone requires different Git protocol behavior, +setting the `depth` argument makes Terraform pass your [`ref` argument](#selecting-a-revision), +if any, to +[the `--branch` argument to `git clone`](https://git-scm.com/docs/git-clone#Documentation/git-clone.txt---branchltnamegt) +instead. That means it must specify a named branch or tag known to the remote +repository, and that raw commit IDs are not acceptable. + +Because Terraform only uses the most recent selected commit to find the source +code of your specified module, it is not typically useful to set `depth` +to any value other than `1`. + ### "scp-like" address syntax When using Git over SSH, we recommend using the `ssh://`-prefixed URL form @@ -421,10 +445,12 @@ module "consul" { } ``` -The module installer uses Google Cloud SDK to authenticate with GCS. To set credentials you can: +The module installer uses Google Cloud SDK to authenticate with GCS. You can +use any of the following methods to set Google Cloud Platform credentials: -* Enter the path of your service account key file in the GOOGLE_APPLICATION_CREDENTIALS environment variable, or; -* If you're running Terraform from a GCE instance, default credentials are automatically available. See [Creating and Enabling Service Accounts](https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances) for Instances for more details +* Set the `GOOGLE_OAUTH_ACCESS_TOKEN` environment variable to a raw Google Cloud Platform OAuth access token. +* Enter the path of your service account key file in the `GOOGLE_APPLICATION_CREDENTIALS` environment variable. 
+* If you're running Terraform from a GCE instance, default credentials are automatically available. See [Creating and Enabling Service Accounts](https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances) for Instances for more details. * On your computer, you can make your Google identity available by running `gcloud auth application-default login`. ## Modules in Package Sub-directories From 3d769b7282614db7ccf1a33ec2cdefa0e1465f7c Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 21 Dec 2021 14:50:53 -0500 Subject: [PATCH 17/68] IsModuleMoveReIndex Add a method for checking if the From and To addresses in a move statement are only changing the indexes of modules relative to the statement module. This is needed because move statement nested within the module will be able to match against both the From and To addresses, causing cycles in the order of move operations. --- internal/addrs/module_instance_test.go | 6 +- internal/addrs/move_endpoint_module.go | 52 +++++++- internal/addrs/move_endpoint_module_test.go | 133 ++++++++++++++++++++ 3 files changed, 187 insertions(+), 4 deletions(-) diff --git a/internal/addrs/module_instance_test.go b/internal/addrs/module_instance_test.go index 4ad096cfc..393bcd57e 100644 --- a/internal/addrs/module_instance_test.go +++ b/internal/addrs/module_instance_test.go @@ -162,9 +162,9 @@ func TestModuleInstance_IsDeclaredByCall(t *testing.T) { } func mustParseModuleInstanceStr(str string) ModuleInstance { - mi, err := ParseModuleInstanceStr(str) - if err != nil { - panic(err) + mi, diags := ParseModuleInstanceStr(str) + if diags.HasErrors() { + panic(diags.ErrWithWarnings()) } return mi } diff --git a/internal/addrs/move_endpoint_module.go b/internal/addrs/move_endpoint_module.go index e2180f25a..7ff17621b 100644 --- a/internal/addrs/move_endpoint_module.go +++ b/internal/addrs/move_endpoint_module.go @@ -373,7 +373,7 @@ func (e *MoveEndpointInModule) CanChainFrom(other *MoveEndpointInModule) 
bool { return false } -// NestedWithin returns true if the reciever describes an address that is +// NestedWithin returns true if the receiver describes an address that is // contained within one of the objects that the given other address could // select. func (e *MoveEndpointInModule) NestedWithin(other *MoveEndpointInModule) bool { @@ -704,3 +704,53 @@ func (r AbsResourceInstance) MoveDestination(fromMatch, toMatch *MoveEndpointInM panic("unexpected object kind") } } + +// IsModuleMoveReIndex takes the from and to endpoints from a move statement, +// and returns true if the only changes are to module indexes, and all +// non-absolute paths remain the same. +func IsModuleMoveReIndex(from, to *MoveEndpointInModule) bool { + // The statements must originate from the same module. + if !from.module.Equal(to.module) { + panic("cannot compare move expressions from different modules") + } + + switch f := from.relSubject.(type) { + case AbsModuleCall: + switch t := to.relSubject.(type) { + case ModuleInstance: + if len(t) != 1 { + // An AbsModuleCall only ever has one segment, so the + // ModuleInstance length must match. + return false + } + + return f.Call.Name == t[0].Name + } + + case ModuleInstance: + switch t := to.relSubject.(type) { + case AbsModuleCall: + if len(f) != 1 { + return false + } + + return f[0].Name == t.Call.Name + + case ModuleInstance: + // We must have the same number of segments, and the names must all + // match in order for this to solely be an index change operation. 
+ if len(f) != len(t) { + return false + } + + for i := range f { + if f[i].Name != t[i].Name { + return false + } + } + return true + } + } + + return false +} diff --git a/internal/addrs/move_endpoint_module_test.go b/internal/addrs/move_endpoint_module_test.go index bda37ca53..1e2758239 100644 --- a/internal/addrs/move_endpoint_module_test.go +++ b/internal/addrs/move_endpoint_module_test.go @@ -1584,6 +1584,139 @@ func TestSelectsResource(t *testing.T) { } } +func TestIsModuleMoveReIndex(t *testing.T) { + tests := []struct { + from, to AbsMoveable + expect bool + }{ + { + from: mustParseModuleInstanceStr(`module.bar`), + to: mustParseModuleInstanceStr(`module.bar`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar`), + to: mustParseModuleInstanceStr(`module.bar[0]`), + expect: true, + }, + { + from: AbsModuleCall{ + Call: ModuleCall{Name: "bar"}, + }, + to: mustParseModuleInstanceStr(`module.bar[0]`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar["a"]`), + to: AbsModuleCall{ + Call: ModuleCall{Name: "bar"}, + }, + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.foo`), + to: mustParseModuleInstanceStr(`module.bar`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar`), + to: mustParseModuleInstanceStr(`module.foo[0]`), + expect: false, + }, + { + from: AbsModuleCall{ + Call: ModuleCall{Name: "bar"}, + }, + to: mustParseModuleInstanceStr(`module.foo[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar["a"]`), + to: AbsModuleCall{ + Call: ModuleCall{Name: "foo"}, + }, + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: 
mustParseModuleInstanceStr(`module.baz.module.baz`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.baz.module.baz[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz`), + to: mustParseModuleInstanceStr(`module.bar[0].module.baz`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar[0].module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: true, + }, + { + from: mustParseModuleInstanceStr(`module.bar[0].module.baz`), + to: mustParseModuleInstanceStr(`module.bar[1].module.baz[0]`), + expect: true, + }, + { + from: AbsModuleCall{ + Call: ModuleCall{Name: "baz"}, + }, + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + to: AbsModuleCall{ + Call: ModuleCall{Name: "baz"}, + }, + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.baz`), + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: false, + }, + { + from: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + to: mustParseModuleInstanceStr(`module.baz`), + expect: false, + }, + } + + for i, test := range tests { + t.Run(fmt.Sprintf("[%02d]IsModuleMoveReIndex(%s, %s)", i, test.from, test.to), + func(t *testing.T) { + from := &MoveEndpointInModule{ + relSubject: test.from, + } + + to := &MoveEndpointInModule{ + relSubject: test.to, + } + + if got := IsModuleMoveReIndex(from, to); got != test.expect { + t.Errorf("expected %t, got %t", test.expect, got) + } + }, + ) + } +} + func mustParseAbsResourceInstanceStr(s string) AbsResourceInstance { r, diags := ParseAbsResourceInstanceStr(s) if diags.HasErrors() { From deb82daf2bbb28ccaf6d52142b4e74428ca21e72 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 21 Dec 2021 15:40:56 -0500 Subject: [PATCH 18/68] find implied moves in nested modules Implied moves in 
nested modules were being skipped --- internal/refactoring/move_statement.go | 2 +- internal/refactoring/move_statement_test.go | 47 +++++++++++++++++++ .../child/move-statement-implied.tf | 16 +++++++ .../move-statement-implied.tf | 4 ++ 4 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf diff --git a/internal/refactoring/move_statement.go b/internal/refactoring/move_statement.go index a363602c3..08fffeb6f 100644 --- a/internal/refactoring/move_statement.go +++ b/internal/refactoring/move_statement.go @@ -149,7 +149,7 @@ func impliedMoveStatements(cfg *configs.Config, prevRunState *states.State, expl } for _, childCfg := range cfg.Children { - into = findMoveStatements(childCfg, into) + into = impliedMoveStatements(childCfg, prevRunState, explicitStmts, into) } return into diff --git a/internal/refactoring/move_statement_test.go b/internal/refactoring/move_statement_test.go index c6f7c2d79..249d7df7e 100644 --- a/internal/refactoring/move_statement_test.go +++ b/internal/refactoring/move_statement_test.go @@ -18,6 +18,15 @@ func TestImpliedMoveStatements(t *testing.T) { Name: name, }.Absolute(addrs.RootModuleInstance) } + + nestedResourceAddr := func(mod, name string) addrs.AbsResource { + return addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "foo", + Name: name, + }.Absolute(addrs.RootModuleInstance.Child(mod, addrs.NoKey)) + } + instObjState := func() *states.ResourceInstanceObjectSrc { return &states.ResourceInstanceObjectSrc{} } @@ -86,6 +95,19 @@ func TestImpliedMoveStatements(t *testing.T) { instObjState(), providerAddr, ) + + // Add two resource nested in a module to ensure we find these + // recursively. 
+ s.SetResourceInstanceCurrent( + nestedResourceAddr("child", "formerly_count").Instance(addrs.IntKey(0)), + instObjState(), + providerAddr, + ) + s.SetResourceInstanceCurrent( + nestedResourceAddr("child", "now_count").Instance(addrs.NoKey), + instObjState(), + providerAddr, + ) }) explicitStmts := FindMoveStatements(rootCfg) @@ -101,6 +123,19 @@ func TestImpliedMoveStatements(t *testing.T) { End: tfdiags.SourcePos{Line: 5, Column: 32, Byte: 211}, }, }, + + // Found implied moves in a nested module, ignoring the explicit moves + { + From: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "formerly_count").Instance(addrs.IntKey(0)), tfdiags.SourceRange{}), + To: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "formerly_count").Instance(addrs.NoKey), tfdiags.SourceRange{}), + Implied: true, + DeclRange: tfdiags.SourceRange{ + Filename: "testdata/move-statement-implied/child/move-statement-implied.tf", + Start: tfdiags.SourcePos{Line: 5, Column: 1, Byte: 180}, + End: tfdiags.SourcePos{Line: 5, Column: 32, Byte: 211}, + }, + }, + { From: addrs.ImpliedMoveStatementEndpoint(resourceAddr("now_count").Instance(addrs.NoKey), tfdiags.SourceRange{}), To: addrs.ImpliedMoveStatementEndpoint(resourceAddr("now_count").Instance(addrs.IntKey(0)), tfdiags.SourceRange{}), @@ -112,6 +147,18 @@ func TestImpliedMoveStatements(t *testing.T) { }, }, + // Found implied moves in a nested module, ignoring the explicit moves + { + From: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "now_count").Instance(addrs.NoKey), tfdiags.SourceRange{}), + To: addrs.ImpliedMoveStatementEndpoint(nestedResourceAddr("child", "now_count").Instance(addrs.IntKey(0)), tfdiags.SourceRange{}), + Implied: true, + DeclRange: tfdiags.SourceRange{ + Filename: "testdata/move-statement-implied/child/move-statement-implied.tf", + Start: tfdiags.SourcePos{Line: 10, Column: 11, Byte: 282}, + End: tfdiags.SourcePos{Line: 10, Column: 12, Byte: 283}, + }, + }, + // We generate 
foo.ambiguous[0] to foo.ambiguous here, even though // there's already a foo.ambiguous in the state, because it's the // responsibility of the later ApplyMoves step to deal with the diff --git a/internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf b/internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf new file mode 100644 index 000000000..87d09c827 --- /dev/null +++ b/internal/refactoring/testdata/move-statement-implied/child/move-statement-implied.tf @@ -0,0 +1,16 @@ +# This fixture is useful only in conjunction with a previous run state that +# conforms to the statements encoded in the resource names. It's for +# TestImpliedMoveStatements only. + +resource "foo" "formerly_count" { + # but not count anymore +} + +resource "foo" "now_count" { + count = 1 +} + +moved { + from = foo.no_longer_present[1] + to = foo.no_longer_present +} diff --git a/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf b/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf index 498ead305..4ea628ea6 100644 --- a/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf +++ b/internal/refactoring/testdata/move-statement-implied/move-statement-implied.tf @@ -48,3 +48,7 @@ resource "foo" "ambiguous" { # set it up to have both no-key and zero-key instances in the # state. } + +module "child" { + source = "./child" +} From 22dc685052178184e998902bf3e881f618b079fb Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 21 Dec 2021 15:08:48 -0500 Subject: [PATCH 19/68] check for nested module index changes Changing only the index on a nested module will cause all nested moves to create cycles, since their full addresses will match both the From and To addresses. When building the dependency graph, check if the parent is only changing the index of the containing module, and prevent the backwards edge for the move. 
--- internal/addrs/move_endpoint_module.go | 40 +++++----------- internal/addrs/move_endpoint_module_test.go | 21 ++++++++- internal/refactoring/move_execute.go | 30 ++++++++++-- internal/refactoring/move_validate_test.go | 52 +++++++++++++++++++++ 4 files changed, 109 insertions(+), 34 deletions(-) diff --git a/internal/addrs/move_endpoint_module.go b/internal/addrs/move_endpoint_module.go index 7ff17621b..fdc8a5c25 100644 --- a/internal/addrs/move_endpoint_module.go +++ b/internal/addrs/move_endpoint_module.go @@ -705,10 +705,10 @@ func (r AbsResourceInstance) MoveDestination(fromMatch, toMatch *MoveEndpointInM } } -// IsModuleMoveReIndex takes the from and to endpoints from a move statement, -// and returns true if the only changes are to module indexes, and all -// non-absolute paths remain the same. -func IsModuleMoveReIndex(from, to *MoveEndpointInModule) bool { +// IsModuleReIndex takes the From and To endpoints from a single move +// statement, and returns true if the only changes are to module indexes, and +// all non-absolute paths remain the same. +func (from *MoveEndpointInModule) IsModuleReIndex(to *MoveEndpointInModule) bool { // The statements must originate from the same module. if !from.module.Equal(to.module) { panic("cannot compare move expressions from different modules") @@ -718,37 +718,21 @@ func IsModuleMoveReIndex(from, to *MoveEndpointInModule) bool { case AbsModuleCall: switch t := to.relSubject.(type) { case ModuleInstance: - if len(t) != 1 { - // An AbsModuleCall only ever has one segment, so the - // ModuleInstance length must match. - return false - } - - return f.Call.Name == t[0].Name + // Generate a synthetic module to represent the full address of + // the module call. We're not actually comparing indexes, so the + // instance doesn't matter. 
+ callAddr := f.Instance(NoKey).Module() + return callAddr.Equal(t.Module()) } case ModuleInstance: switch t := to.relSubject.(type) { case AbsModuleCall: - if len(f) != 1 { - return false - } - - return f[0].Name == t.Call.Name + callAddr := t.Instance(NoKey).Module() + return callAddr.Equal(f.Module()) case ModuleInstance: - // We must have the same number of segments, and the names must all - // match in order for this to solely be an index change operation. - if len(f) != len(t) { - return false - } - - for i := range f { - if f[i].Name != t[i].Name { - return false - } - } - return true + return t.Module().Equal(f.Module()) } } diff --git a/internal/addrs/move_endpoint_module_test.go b/internal/addrs/move_endpoint_module_test.go index 1e2758239..c1643d44c 100644 --- a/internal/addrs/move_endpoint_module_test.go +++ b/internal/addrs/move_endpoint_module_test.go @@ -1686,6 +1686,25 @@ func TestIsModuleMoveReIndex(t *testing.T) { }, expect: false, }, + + { + from: AbsModuleCall{ + Module: mustParseModuleInstanceStr(`module.bar[0]`), + Call: ModuleCall{Name: "baz"}, + }, + to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + expect: true, + }, + + { + from: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), + to: AbsModuleCall{ + Module: mustParseModuleInstanceStr(`module.bar[0]`), + Call: ModuleCall{Name: "baz"}, + }, + expect: true, + }, + { from: mustParseModuleInstanceStr(`module.baz`), to: mustParseModuleInstanceStr(`module.bar.module.baz[0]`), @@ -1709,7 +1728,7 @@ func TestIsModuleMoveReIndex(t *testing.T) { relSubject: test.to, } - if got := IsModuleMoveReIndex(from, to); got != test.expect { + if got := from.IsModuleReIndex(to); got != test.expect { t.Errorf("expected %t, got %t", test.expect, got) } }, diff --git a/internal/refactoring/move_execute.go b/internal/refactoring/move_execute.go index b99da1072..97a9d5c7f 100644 --- a/internal/refactoring/move_execute.go +++ b/internal/refactoring/move_execute.go @@ -242,11 +242,31 @@ func 
statementDependsOn(a, b *MoveStatement) bool { // // Since we are only interested in checking if A depends on B, we only need // to check the 4 possibilities above which result in B being executed - // first. - return a.From.NestedWithin(b.To) || - a.To.NestedWithin(b.To) || - b.From.NestedWithin(a.From) || - b.To.NestedWithin(a.From) + // first. If we're there's no dependency at all we can return immediately. + if !(a.From.NestedWithin(b.To) || a.To.NestedWithin(b.To) || + b.From.NestedWithin(a.From) || b.To.NestedWithin(a.From)) { + return false + } + + // If a nested move has a dependency, we need to rule out the possibility + // that this is a move inside a module only changing indexes. If an + // ancestor module is only changing the index of a nested module, any + // nested move statements are going to match both the From and To address + // when the base name is not changing, causing a cycle in the order of + // operations. + + // if A is not declared in an ancestor module, then we can't be nested + // within a module index change. + if len(a.To.Module()) >= len(b.To.Module()) { + return true + } + // We only want the nested move statement to depend on the outer module + // move, so we only test this in the reverse direction. + if a.From.IsModuleReIndex(a.To) { + return false + } + + return true } // MoveResults describes the outcome of an ApplyMoves call. diff --git a/internal/refactoring/move_validate_test.go b/internal/refactoring/move_validate_test.go index 53bbbe6c2..60122511f 100644 --- a/internal/refactoring/move_validate_test.go +++ b/internal/refactoring/move_validate_test.go @@ -404,6 +404,58 @@ Each resource can have moved from only one source resource.`, }, WantError: `Resource type mismatch: This statement declares a move from test.nonexist1[0] to other.single, which is a resource instance of a different type.`, }, + "crossing nested statements": { + // overlapping nested moves will result in a cycle. 
+ Statements: []MoveStatement{ + makeTestMoveStmt(t, ``, + `module.nonexist.test.single`, + `module.count[0].test.count[0]`, + ), + makeTestMoveStmt(t, ``, + `module.nonexist`, + `module.count[0]`, + ), + }, + WantError: `Cyclic dependency in move statements: The following chained move statements form a cycle, and so there is no final location to move objects to: + - test:1,1: module.nonexist → module.count[0] + - test:1,1: module.nonexist.test.single → module.count[0].test.count[0] + +A chain of move statements must end with an address that doesn't appear in any other statements, and which typically also refers to an object still declared in the configuration.`, + }, + "fully contained nested statements": { + // we have to avoid a cycle because the nested moves appear in both + // the from and to address of the parent when only the module index + // is changing. + Statements: []MoveStatement{ + makeTestMoveStmt(t, `count`, + `test.count`, + `test.count[0]`, + ), + makeTestMoveStmt(t, ``, + `module.count`, + `module.count[0]`, + ), + }, + }, + "double fully contained nested statements": { + // we have to avoid a cycle because the nested moves appear in both + // the from and to address of the parent when only the module index + // is changing. + Statements: []MoveStatement{ + makeTestMoveStmt(t, `count`, + `module.count`, + `module.count[0]`, + ), + makeTestMoveStmt(t, `count.count`, + `test.count`, + `test.count[0]`, + ), + makeTestMoveStmt(t, ``, + `module.count`, + `module.count[0]`, + ), + }, + }, } for name, test := range tests { From f46cf7b8bcaf0e6b580b1c16d496b64cf45a75fc Mon Sep 17 00:00:00 2001 From: James Bardin Date: Wed, 22 Dec 2021 17:32:19 -0500 Subject: [PATCH 20/68] cleanup some move graph handling Create a separate `validateMoveStatementGraph` function so that `ValidateMoves` and `ApplyMoves` both check the same conditions. 
Since we're not using the builtin `graph.Validate` method, because we may have multiple roots and want better cycle diagnostics, we need to add checks for self references too. While multiple roots are an error enforced by `Validate` for the concurrent walk, they are OK when using `TransitiveReduction` and `ReverseDepthFirstWalk`, so we can skip that check. Apply moves must first use `TransitiveReduction` to reduce the graph, otherwise nodes may be skipped if they are passed over by a transitive edge. --- internal/refactoring/move_execute.go | 17 +++++-- internal/refactoring/move_validate.go | 69 +++++++++++++++++++-------- 2 files changed, 62 insertions(+), 24 deletions(-) diff --git a/internal/refactoring/move_execute.go b/internal/refactoring/move_execute.go index 97a9d5c7f..db62152f3 100644 --- a/internal/refactoring/move_execute.go +++ b/internal/refactoring/move_execute.go @@ -31,6 +31,10 @@ func ApplyMoves(stmts []MoveStatement, state *states.State) MoveResults { Blocked: make(map[addrs.UniqueKey]MoveBlocked), } + if len(stmts) == 0 { + return ret + } + // The methodology here is to construct a small graph of all of the move // statements where the edges represent where a particular statement // is either chained from or nested inside the effect of another statement. @@ -39,13 +43,18 @@ func ApplyMoves(stmts []MoveStatement, state *states.State) MoveResults { g := buildMoveStatementGraph(stmts) - // If there are any cycles in the graph then we'll not take any action - // at all. The separate validation step should detect this and return - // an error. - if len(g.Cycles()) != 0 { + // If the graph is not valid the we will not take any action at all. The + // separate validation step should detect this and return an error. 
+ if diags := validateMoveStatementGraph(g); diags.HasErrors() { + log.Printf("[ERROR] ApplyMoves: %s", diags.ErrWithWarnings()) return ret } + // The graph must be reduced in order for ReverseDepthFirstWalk to work + // correctly, since it is built from following edges and can skip over + // dependencies if there is a direct edge to a transitive dependency. + g.TransitiveReduction() + // The starting nodes are the ones that don't depend on any other nodes. startNodes := make(dag.Set, len(stmts)) for _, v := range g.Vertices() { diff --git a/internal/refactoring/move_validate.go b/internal/refactoring/move_validate.go index 133698608..eedf00414 100644 --- a/internal/refactoring/move_validate.go +++ b/internal/refactoring/move_validate.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/hcl/v2" "github.com/hashicorp/terraform/internal/addrs" "github.com/hashicorp/terraform/internal/configs" + "github.com/hashicorp/terraform/internal/dag" "github.com/hashicorp/terraform/internal/instances" "github.com/hashicorp/terraform/internal/tfdiags" ) @@ -31,6 +32,10 @@ import ( func ValidateMoves(stmts []MoveStatement, rootCfg *configs.Config, declaredInsts instances.Set) tfdiags.Diagnostics { var diags tfdiags.Diagnostics + if len(stmts) == 0 { + return diags + } + g := buildMoveStatementGraph(stmts) // We need to track the absolute versions of our endpoint addresses in @@ -200,30 +205,54 @@ func ValidateMoves(stmts []MoveStatement, rootCfg *configs.Config, declaredInsts // validation rules above where we can make better suggestions, and so // we'll use a cycle report only as a last resort. if !diags.HasErrors() { - for _, cycle := range g.Cycles() { - // Reporting cycles is awkward because there isn't any definitive - // way to decide which of the objects in the cycle is the cause of - // the problem. Therefore we'll just list them all out and leave - // the user to figure it out. 
:( - stmtStrs := make([]string, 0, len(cycle)) - for _, stmtI := range cycle { - // move statement graph nodes are pointers to move statements - stmt := stmtI.(*MoveStatement) - stmtStrs = append(stmtStrs, fmt.Sprintf( - "\n - %s: %s → %s", - stmt.DeclRange.StartString(), - stmt.From.String(), - stmt.To.String(), - )) - } - sort.Strings(stmtStrs) // just to make the order deterministic + diags = diags.Append(validateMoveStatementGraph(g)) + } + return diags +} + +func validateMoveStatementGraph(g *dag.AcyclicGraph) tfdiags.Diagnostics { + var diags tfdiags.Diagnostics + for _, cycle := range g.Cycles() { + // Reporting cycles is awkward because there isn't any definitive + // way to decide which of the objects in the cycle is the cause of + // the problem. Therefore we'll just list them all out and leave + // the user to figure it out. :( + stmtStrs := make([]string, 0, len(cycle)) + for _, stmtI := range cycle { + // move statement graph nodes are pointers to move statements + stmt := stmtI.(*MoveStatement) + stmtStrs = append(stmtStrs, fmt.Sprintf( + "\n - %s: %s → %s", + stmt.DeclRange.StartString(), + stmt.From.String(), + stmt.To.String(), + )) + } + sort.Strings(stmtStrs) // just to make the order deterministic + + diags = diags.Append(tfdiags.Sourceless( + tfdiags.Error, + "Cyclic dependency in move statements", + fmt.Sprintf( + "The following chained move statements form a cycle, and so there is no final location to move objects to:%s\n\nA chain of move statements must end with an address that doesn't appear in any other statements, and which typically also refers to an object still declared in the configuration.", + strings.Join(stmtStrs, ""), + ), + )) + } + + // Look for cycles to self. + // A user shouldn't be able to create self-references, but we cannot + // correctly process a graph with them. 
+ for _, e := range g.Edges() { + src := e.Source() + if src == e.Target() { diags = diags.Append(tfdiags.Sourceless( tfdiags.Error, - "Cyclic dependency in move statements", + "Self reference in move statements", fmt.Sprintf( - "The following chained move statements form a cycle, and so there is no final location to move objects to:%s\n\nA chain of move statements must end with an address that doesn't appear in any other statements, and which typically also refers to an object still declared in the configuration.", - strings.Join(stmtStrs, ""), + "The move statement %s refers to itself the move dependency graph, which is invalid. This is a bug in Terraform; please report it!", + src.(*MoveStatement).Name(), ), )) } From fae68f166fb25578026c3ef7974cb05b793d9cd9 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 4 Jan 2022 09:20:30 -0500 Subject: [PATCH 21/68] Remove sorted walk functions These two functions were left during a refactor to ensure the old behavior of a sorted walk was still accessible in some manner. The package has since been removed from any public API, and the sorted versions are no longer called, so we can remove them. --- internal/dag/dag.go | 96 ---------------------------------------- internal/dag/dag_test.go | 5 ++- 2 files changed, 4 insertions(+), 97 deletions(-) diff --git a/internal/dag/dag.go b/internal/dag/dag.go index d72b3988e..5aca57944 100644 --- a/internal/dag/dag.go +++ b/internal/dag/dag.go @@ -2,7 +2,6 @@ package dag import ( "fmt" - "sort" "strings" "github.com/hashicorp/terraform/internal/tfdiags" @@ -218,49 +217,6 @@ func (g *AcyclicGraph) DepthFirstWalk(start Set, f DepthWalkFunc) error { return nil } -// SortedDepthFirstWalk does a depth-first walk of the graph starting from -// the vertices in start, always iterating the nodes in a consistent order. 
-func (g *AcyclicGraph) SortedDepthFirstWalk(start []Vertex, f DepthWalkFunc) error { - seen := make(map[Vertex]struct{}) - frontier := make([]*vertexAtDepth, len(start)) - for i, v := range start { - frontier[i] = &vertexAtDepth{ - Vertex: v, - Depth: 0, - } - } - for len(frontier) > 0 { - // Pop the current vertex - n := len(frontier) - current := frontier[n-1] - frontier = frontier[:n-1] - - // Check if we've seen this already and return... - if _, ok := seen[current.Vertex]; ok { - continue - } - seen[current.Vertex] = struct{}{} - - // Visit the current node - if err := f(current.Vertex, current.Depth); err != nil { - return err - } - - // Visit targets of this in a consistent order. - targets := AsVertexList(g.downEdgesNoCopy(current.Vertex)) - sort.Sort(byVertexName(targets)) - - for _, t := range targets { - frontier = append(frontier, &vertexAtDepth{ - Vertex: t, - Depth: current.Depth + 1, - }) - } - } - - return nil -} - // ReverseDepthFirstWalk does a depth-first walk _up_ the graph starting from // the vertices in start. func (g *AcyclicGraph) ReverseDepthFirstWalk(start Set, f DepthWalkFunc) error { @@ -299,55 +255,3 @@ func (g *AcyclicGraph) ReverseDepthFirstWalk(start Set, f DepthWalkFunc) error { return nil } - -// SortedReverseDepthFirstWalk does a depth-first walk _up_ the graph starting from -// the vertices in start, always iterating the nodes in a consistent order. -func (g *AcyclicGraph) SortedReverseDepthFirstWalk(start []Vertex, f DepthWalkFunc) error { - seen := make(map[Vertex]struct{}) - frontier := make([]*vertexAtDepth, len(start)) - for i, v := range start { - frontier[i] = &vertexAtDepth{ - Vertex: v, - Depth: 0, - } - } - for len(frontier) > 0 { - // Pop the current vertex - n := len(frontier) - current := frontier[n-1] - frontier = frontier[:n-1] - - // Check if we've seen this already and return... 
- if _, ok := seen[current.Vertex]; ok { - continue - } - seen[current.Vertex] = struct{}{} - - // Add next set of targets in a consistent order. - targets := AsVertexList(g.upEdgesNoCopy(current.Vertex)) - sort.Sort(byVertexName(targets)) - for _, t := range targets { - frontier = append(frontier, &vertexAtDepth{ - Vertex: t, - Depth: current.Depth + 1, - }) - } - - // Visit the current node - if err := f(current.Vertex, current.Depth); err != nil { - return err - } - } - - return nil -} - -// byVertexName implements sort.Interface so a list of Vertices can be sorted -// consistently by their VertexName -type byVertexName []Vertex - -func (b byVertexName) Len() int { return len(b) } -func (b byVertexName) Swap(i, j int) { b[i], b[j] = b[j], b[i] } -func (b byVertexName) Less(i, j int) bool { - return VertexName(b[i]) < VertexName(b[j]) -} diff --git a/internal/dag/dag_test.go b/internal/dag/dag_test.go index 0402cda39..9c8cdb794 100644 --- a/internal/dag/dag_test.go +++ b/internal/dag/dag_test.go @@ -392,7 +392,10 @@ func TestAcyclicGraph_ReverseDepthFirstWalk_WithRemoval(t *testing.T) { var visits []Vertex var lock sync.Mutex - err := g.SortedReverseDepthFirstWalk([]Vertex{1}, func(v Vertex, d int) error { + root := make(Set) + root.Add(1) + + err := g.ReverseDepthFirstWalk(root, func(v Vertex, d int) error { lock.Lock() defer lock.Unlock() visits = append(visits, v) From 344adb6c504e70b97ba42ce65256554630a9edf0 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Tue, 4 Jan 2022 09:48:45 -0500 Subject: [PATCH 22/68] clarify dag comments TransitiveReduction does not rely on having a single root, and only must be free of cycles. DepthFirstWalk and ReverseDepthFirstWalk do not do a topological sort, so if order matters TransitiveReduction must be run first. 
--- internal/dag/dag.go | 10 +++++--- internal/dag/dag_test.go | 49 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 3 deletions(-) diff --git a/internal/dag/dag.go b/internal/dag/dag.go index 5aca57944..6da10df51 100644 --- a/internal/dag/dag.go +++ b/internal/dag/dag.go @@ -88,9 +88,7 @@ func (g *AcyclicGraph) Root() (Vertex, error) { // same graph with only a single edge between A and B, and a single edge // between B and C. // -// The graph must be valid for this operation to behave properly. If -// Validate() returns an error, the behavior is undefined and the results -// will likely be unexpected. +// The graph must be free of cycles for this operation to behave properly. // // Complexity: O(V(V+E)), or asymptotically O(VE) func (g *AcyclicGraph) TransitiveReduction() { @@ -145,6 +143,8 @@ func (g *AcyclicGraph) Validate() error { return err } +// Cycles reports any cycles between graph nodes. +// Self-referencing nodes are not reported, and must be detected separately. func (g *AcyclicGraph) Cycles() [][]Vertex { var cycles [][]Vertex for _, cycle := range StronglyConnected(&g.Graph) { @@ -180,6 +180,8 @@ type vertexAtDepth struct { // DepthFirstWalk does a depth-first walk of the graph starting from // the vertices in start. +// The algorithm used here does not do a complete topological sort. To ensure +// correct overall ordering run TransitiveReduction first. func (g *AcyclicGraph) DepthFirstWalk(start Set, f DepthWalkFunc) error { seen := make(map[Vertex]struct{}) frontier := make([]*vertexAtDepth, 0, len(start)) @@ -219,6 +221,8 @@ func (g *AcyclicGraph) DepthFirstWalk(start Set, f DepthWalkFunc) error { // ReverseDepthFirstWalk does a depth-first walk _up_ the graph starting from // the vertices in start. +// The algorithm used here does not do a complete topological sort. To ensure +// correct overall ordering run TransitiveReduction first. 
func (g *AcyclicGraph) ReverseDepthFirstWalk(start Set, f DepthWalkFunc) error { seen := make(map[Vertex]struct{}) frontier := make([]*vertexAtDepth, 0, len(start)) diff --git a/internal/dag/dag_test.go b/internal/dag/dag_test.go index 9c8cdb794..75cfb86ff 100644 --- a/internal/dag/dag_test.go +++ b/internal/dag/dag_test.go @@ -99,6 +99,38 @@ func TestAyclicGraphTransReduction_more(t *testing.T) { } } +func TestAyclicGraphTransReduction_multipleRoots(t *testing.T) { + var g AcyclicGraph + g.Add(1) + g.Add(2) + g.Add(3) + g.Add(4) + g.Connect(BasicEdge(1, 2)) + g.Connect(BasicEdge(1, 3)) + g.Connect(BasicEdge(1, 4)) + g.Connect(BasicEdge(2, 3)) + g.Connect(BasicEdge(2, 4)) + g.Connect(BasicEdge(3, 4)) + + g.Add(5) + g.Add(6) + g.Add(7) + g.Add(8) + g.Connect(BasicEdge(5, 6)) + g.Connect(BasicEdge(5, 7)) + g.Connect(BasicEdge(5, 8)) + g.Connect(BasicEdge(6, 7)) + g.Connect(BasicEdge(6, 8)) + g.Connect(BasicEdge(7, 8)) + g.TransitiveReduction() + + actual := strings.TrimSpace(g.String()) + expected := strings.TrimSpace(testGraphTransReductionMultipleRootsStr) + if actual != expected { + t.Fatalf("bad: %s", actual) + } +} + // use this to simulate slow sort operations type counter struct { Name string @@ -429,3 +461,20 @@ const testGraphTransReductionMoreStr = ` 4 4 ` + +const testGraphTransReductionMultipleRootsStr = ` +1 + 2 +2 + 3 +3 + 4 +4 +5 + 6 +6 + 7 +7 + 8 +8 +` From f8fdb6de3f9bf2b2664ca845954094224b63369a Mon Sep 17 00:00:00 2001 From: Katy Moe Date: Wed, 5 Jan 2022 11:28:47 +0000 Subject: [PATCH 23/68] do not use pointer addr strings as map keys in set When creating a Set of BasicEdges, the Hashcode function is used to determine map keys for the underlying set data structure. The string hex representation of the two vertices' pointers is unsafe to use as a map key, since these addresses may change between the time they are added to the set and the time the set is operated on. 
Instead we modify the Hashcode function to maintain the references to the underlying vertices so they cannot be garbage collected during the lifetime of the Set. --- internal/dag/edge.go | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/internal/dag/edge.go b/internal/dag/edge.go index f0d99ee3a..8c78924bb 100644 --- a/internal/dag/edge.go +++ b/internal/dag/edge.go @@ -1,9 +1,5 @@ package dag -import ( - "fmt" -) - // Edge represents an edge in the graph, with a source and target vertex. type Edge interface { Source() Vertex @@ -25,7 +21,7 @@ type basicEdge struct { } func (e *basicEdge) Hashcode() interface{} { - return fmt.Sprintf("%p-%p", e.S, e.T) + return [...]interface{}{e.S, e.T} } func (e *basicEdge) Source() Vertex { From df36a03be10a460eb84477e7bccb576c0869d196 Mon Sep 17 00:00:00 2001 From: Alisdair McDiarmid Date: Wed, 5 Jan 2022 12:29:20 -0500 Subject: [PATCH 24/68] states: Add failing test for ordered dependencies --- internal/states/instance_object_test.go | 37 +++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 internal/states/instance_object_test.go diff --git a/internal/states/instance_object_test.go b/internal/states/instance_object_test.go new file mode 100644 index 000000000..f8be9743e --- /dev/null +++ b/internal/states/instance_object_test.go @@ -0,0 +1,37 @@ +package states + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/hashicorp/terraform/internal/addrs" + "github.com/zclconf/go-cty/cty" +) + +func TestResourceInstanceObject_encode(t *testing.T) { + value := cty.ObjectVal(map[string]cty.Value{ + "foo": cty.True, + }) + deps := []addrs.ConfigResource{ + addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "honk"), + addrs.RootModule.Child("child").Resource(addrs.ManagedResourceMode, "test", "flub"), + addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "boop"), + } + wantDeps := []addrs.ConfigResource{ + 
addrs.RootModule.Child("child").Resource(addrs.ManagedResourceMode, "test", "flub"), + addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "boop"), + addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "honk"), + } + rio := &ResourceInstanceObject{ + Value: value, + Status: ObjectPlanned, + Dependencies: deps, + } + rios, err := rio.Encode(value.Type(), 0) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + if diff := cmp.Diff(wantDeps, rios.Dependencies); diff != "" { + t.Errorf("wrong result for deps\n%s", diff) + } +} From 6704f8c795195db717a21b05b80b1007a38496f2 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Wed, 22 Dec 2021 16:47:15 -0800 Subject: [PATCH 25/68] Initial work on a new package build process For the moment this is just an experimental additional sidecar package build process, separate from the one we really use for releases, so that we can get some experience building in the GitHub Actions environment before hopefully eventually switching to using the artifacts from this process as the packages we'll release through the official release channels. It will react to any push to one of our release branches or to a release tag by building official-release-like .zip, .deb, and .rpm packages, along with Docker images, based on the content of the corresponding commit. For the moment this doesn't actually produce _shippable_ packages because in particular it doesn't know how to update our version/version.go file to hard-code the correct version number. Once Go 1.18 is release and we've upgraded to it we'll switch to using debug.ReadBuildInfo to determine our version number at runtime and so no longer need to directly update a source file for each release, but that functionality isn't yet available in our current Go 1.17 release. 
--- .github/workflows/build-Dockerfile | 41 ++++++ .github/workflows/build.yml | 209 +++++++++++++++++++++++++++++ 2 files changed, 250 insertions(+) create mode 100644 .github/workflows/build-Dockerfile create mode 100644 .github/workflows/build.yml diff --git a/.github/workflows/build-Dockerfile b/.github/workflows/build-Dockerfile new file mode 100644 index 000000000..6f8f15853 --- /dev/null +++ b/.github/workflows/build-Dockerfile @@ -0,0 +1,41 @@ +# This Dockerfile is not intended for general use, but is rather used to +# produce our "light" release packages as part of our official release +# pipeline. +# +# If you want to test this locally you'll need to set the three arguments +# to values realistic for what the hashicorp/actions-docker-build GitHub +# action would set, and ensure that there's a suitable "terraform" executable +# in the dist/linux/${TARGETARCH} directory. + +FROM docker.mirror.hashicorp.services/alpine:latest AS default + +# This is intended to be run from the hashicorp/actions-docker-build GitHub +# action, which sets these appropriately based on context. +ARG PRODUCT_VERSION=UNSPECIFIED +ARG PRODUCT_REVISION=UNSPECIFIED +ARG BIN_NAME=terraform + +# This argument is set by the Docker toolchain itself, to the name +# of the CPU architecture we're building an image for. +# Our caller should've extracted the corresponding "terraform" executable +# into dist/linux/${TARGETARCH} for us to use. +ARG TARGETARCH + +LABEL maintainer="HashiCorp Terraform Team " + +# New standard version label. +LABEL version=$VERSION + +# Historical Terraform-specific label preserved for backward compatibility. 
+LABEL "com.hashicorp.terraform.version"="${VERSION}" + +RUN apk add --no-cache git openssh + +# The hashicorp/actions-docker-build GitHub Action extracts the appropriate +# release package for our target architecture into the current working +# directory before running "docker build", which we'll then copy into the +# Docker image to make sure that we use an identical binary as all of the +# other official release channels. +COPY ["dist/linux/${TARGETARCH}/terraform", "/bin/terraform"] + +ENTRYPOINT ["/bin/terraform"] diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..c2f2092ba --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,209 @@ +name: Build Terraform CLI Packages + +# If you want to test changes to this file before merging to a main branch, +# push them up to a branch whose name has the prefix "build-workflow-dev/", +# which is a special prefix that triggers this workflow even though it's not +# actually a release branch. 
+ +on: + workflow_dispatch: + push: + branches: + - main + - 'v[0-9]+.[0-9]+' + - build-workflow-dev/* + tags: + - 'v[0-9]+.[0-9]+.[0-9]+*' + +env: + PKG_NAME: "terraform" + +jobs: + get-product-version: + name: "Determine intended Terraform version" + runs-on: ubuntu-latest + outputs: + product-version: ${{ steps.get-product-version.outputs.product-version }} + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Need all commits and tags to find a reasonable version number + - name: Decide version number + id: get-product-version + run: | + git describe --first-parent + echo "::set-output name=product-version::$(git describe --first-parent)" + + get-go-version: + name: "Determine Go toolchain version" + runs-on: ubuntu-latest + outputs: + go-version: ${{ steps.get-go-version.outputs.go-version }} + steps: + - uses: actions/checkout@v2 + - name: Determine Go version + id: get-go-version + # We use .go-version as our source of truth for current Go + # version, because "goenv" can react to it automatically. 
+ run: | + echo "Building with Go $(cat .go-version)" + echo "::set-output name=go-version::$(cat .go-version)" + + generate-metadata-file: + name: "Generate release metadata" + needs: get-product-version + runs-on: ubuntu-latest + outputs: + filepath: ${{ steps.generate-metadata-file.outputs.filepath }} + steps: + - uses: actions/checkout@v2 + - name: Generate package metadata + id: generate-metadata-file + uses: hashicorp/actions-generate-metadata@main + with: + version: ${{ needs.get-product-version.outputs.product-version }} + product: ${{ env.PKG_NAME }} + + - uses: actions/upload-artifact@v2 + with: + name: metadata.json + path: ${{ steps.generate-metadata-file.outputs.filepath }} + + build: + needs: ["get-product-version", "get-go-version"] + runs-on: ubuntu-latest + strategy: + matrix: + include: + - {goos: "freebsd", goarch: "386"} + - {goos: "freebsd", goarch: "amd64"} + - {goos: "freebsd", goarch: "arm"} + - {goos: "linux", goarch: "386"} + - {goos: "linux", goarch: "amd64"} + - {goos: "linux", goarch: "arm"} + - {goos: "linux", goarch: "arm64"} + - {goos: "openbsd", goarch: "386"} + - {goos: "openbsd", goarch: "amd64"} + - {goos: "solaris", goarch: "amd64"} + - {goos: "windows", goarch: "386"} + - {goos: "windows", goarch: "amd64"} + fail-fast: false + + name: Build for ${{ matrix.goos }}_${{ matrix.goarch }} + + steps: + - uses: actions/checkout@v2 + + - name: Install Go toolchain + uses: actions/setup-go@v2 + with: + go-version: ${{ needs.get-go-version.outputs.go-version }} + + - name: Build + env: + GOOS: ${{ matrix.goos }} + GOARCH: ${{ matrix.goarch }} + run: | + mkdir dist out + go build -o dist/ . 
+ zip -r -j out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip dist/ + + - uses: actions/upload-artifact@v2 + with: + name: ${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip + path: out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip + + - name: Linux distribution packages + if: ${{ matrix.goos == 'linux' }} + uses: hashicorp/package@v1 + with: + name: "terraform" + description: "Terraform enables you to safely and predictably create, change, and improve infrastructure. It is an open source tool that codifies APIs into declarative configuration files that can be shared amongst team members, treated as code, edited, reviewed, and versioned." + arch: ${{ matrix.goarch }} + version: ${{ needs.get-product-version.outputs.product-version }} + maintainer: "HashiCorp" + homepage: "https://terraform.io/" + license: "MPL-2.0" + binary: "dist/${{ env.PKG_NAME }}" + deb_depends: "git" + rpm_depends: "git" + + - name: Gather Linux distribution package filenames + if: ${{ matrix.goos == 'linux' }} + run: | + echo "RPM_PACKAGE=$(basename out/*.rpm)" >> $GITHUB_ENV + echo "DEB_PACKAGE=$(basename out/*.deb)" >> $GITHUB_ENV + + - uses: actions/upload-artifact@v2 + if: ${{ matrix.goos == 'linux' }} + with: + name: ${{ env.RPM_PACKAGE }} + path: out/${{ env.RPM_PACKAGE }} + + - uses: actions/upload-artifact@v2 + if: ${{ matrix.goos == 'linux' }} + with: + name: ${{ env.DEB_PACKAGE }} + path: out/${{ env.DEB_PACKAGE }} + + build-darwin: + needs: ["get-product-version", "get-go-version"] + runs-on: macos-latest + strategy: + matrix: + include: + - {goos: "darwin", goarch: "amd64"} + - {goos: "darwin", goarch: "arm64"} + fail-fast: false + + name: Build for ${{ matrix.goos }}_${{ matrix.goarch }} + + env: + GOOS: ${{ matrix.goos }} + GOARCH: ${{ matrix.goarch }} + + steps: 
+ - uses: actions/checkout@v2 + + - name: Install Go toolchain + uses: actions/setup-go@v2 + with: + go-version: ${{ needs.get-go-version.outputs.go-version }} + + - name: Build + run: | + mkdir dist out + go build -o dist/ + zip -r -j out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip dist/ + + - uses: actions/upload-artifact@v2 + with: + name: ${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip + path: out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip + + build-docker: + name: Build Docker image for linux_${{ matrix.arch }} + needs: + - get-product-version + - build + runs-on: ubuntu-latest + strategy: + matrix: + arch: ["amd64"] + fail-fast: false + env: + repo: ${{github.event.repository.name}} + version: ${{needs.get-product-version.outputs.product-version}} + + steps: + - uses: actions/checkout@v2 + - name: Build Docker images + uses: hashicorp/actions-docker-build@v1 + with: + version: ${{env.version}} + target: default + arch: ${{matrix.arch}} + dockerfile: .github/workflows/build-Dockerfile + tags: | + docker.io/hashicorp/${{env.repo}}:${{env.version}} + 986891699432.dkr.ecr.us-east-1.amazonaws.com/hashicorp/${{env.repo}}:${{env.version}} From b802db75d73a14a2458113525246555caecb107c Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Thu, 23 Dec 2021 09:10:59 -0800 Subject: [PATCH 26/68] build: Build and run e2etest as part of the release build pipeline This uses the decoupled build and run strategy to run the e2etests so that we can arrange to run the tests against the real release packages produced elsewhere in this workflow, rather than ones generated just in time by the test harness. 
The modifications to make-archive.sh here make it more consistent with its originally-intended purpose of producing a harness for testing "real" release executables. Our earlier compromise of making it include its own terraform executable came from a desire to use that script as part of manual cross-platform testing when we weren't yet set up to support automation of those tests as we're doing here. That does mean, however, that the terraform-e2etest package content must be combined with content from a terraform release package in order to produce a valid context for running the tests. We use a single job to cross-compile the test harness for all of the supported platforms, because that build is relatively fast and so not worth the overhead of a matrix build, but then use a matrix build to actually run the tests so that we can run them in a worker matching the target platform. We currently have access only to amd64 (x64) runners in GitHub Actions and so for the moment this process is limited only to the subset of our supported platforms which use that architecture. 
--- .github/workflows/build.yml | 209 +++++++++++++++++- internal/command/e2etest/main_test.go | 25 ++- internal/command/e2etest/make-archive.sh | 13 +- internal/command/e2etest/primary_test.go | 4 +- internal/command/e2etest/provider_dev_test.go | 8 + .../command/e2etest/provider_plugin_test.go | 8 + .../command/e2etest/providers_tamper_test.go | 16 +- .../e2etest/provisioner_plugin_test.go | 8 + 8 files changed, 269 insertions(+), 22 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c2f2092ba..ee7dc5369 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,11 +28,22 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 # Need all commits and tags to find a reasonable version number - - name: Decide version number - id: get-product-version + - name: Git Describe + id: git-describe run: | git describe --first-parent - echo "::set-output name=product-version::$(git describe --first-parent)" + echo "::set-output name=raw-version::$(git describe --first-parent)" + - name: Decide version number + id: get-product-version + shell: bash + env: + RAW_VERSION: ${{ steps.git-describe.outputs.raw-version }} + run: | + echo "::set-output name=product-version::${RAW_VERSION#v}" + - name: Report chosen version number + run: | + [ -n "${{steps.get-product-version.outputs.product-version}}" ] + echo "::notice title=Terraform CLI Version::${{ steps.get-product-version.outputs.product-version }}" get-go-version: name: "Determine Go toolchain version" @@ -207,3 +218,195 @@ jobs: tags: | docker.io/hashicorp/${{env.repo}}:${{env.version}} 986891699432.dkr.ecr.us-east-1.amazonaws.com/hashicorp/${{env.repo}}:${{env.version}} + + e2etest-build: + name: Build e2etest for ${{ matrix.goos }}_${{ matrix.goarch }} + needs: ["get-go-version"] + runs-on: ubuntu-latest + strategy: + matrix: + # We build test harnesses only for the v1.0 Compatibility Promises + # supported platforms. 
Even within that set, we can only run on + # architectures for which we have GitHub Actions runners available, + # which is currently only amd64 (x64). + # TODO: GitHub Actions does support _self-hosted_ arm and arm64 + # runners, so we could potentially run some ourselves to run our + # tests there, but at the time of writing there is no documented + # support for darwin_arm64 (macOS on Apple Silicon). + include: + - {goos: "darwin", goarch: "amd64"} + #- {goos: "darwin", goarch: "arm64"} + - {goos: "windows", goarch: "amd64"} + - {goos: "linux", goarch: "amd64"} + #- {goos: "linux", goarch: "arm"} + #- {goos: "linux", goarch: "arm64"} + fail-fast: false + + env: + build_script: ./internal/command/e2etest/make-archive.sh + + steps: + - uses: actions/checkout@v2 + + - name: Install Go toolchain + uses: actions/setup-go@v2 + with: + go-version: ${{ needs.get-go-version.outputs.go-version }} + + - name: Build test harness package + env: + GOOS: ${{ matrix.goos }} + GOARCH: ${{ matrix.goarch }} + run: | + bash ./internal/command/e2etest/make-archive.sh + + - uses: actions/upload-artifact@v2 + with: + name: terraform-e2etest_${{ matrix.goos }}_${{ matrix.goarch }}.zip + path: internal/command/e2etest/build/terraform-e2etest_${{ matrix.goos }}_${{ matrix.goarch }}.zip + if-no-files-found: error + + e2etest-linux: + name: e2etest for linux_${{ matrix.goarch }} + runs-on: ubuntu-latest + needs: + - get-product-version + - build + - e2etest-build + + strategy: + matrix: + include: + - {goarch: "amd64"} + #- {goarch: "arm64"} + #- {goarch: "arm"} + fail-fast: false + + env: + os: linux + arch: ${{ matrix.goarch }} + version: ${{needs.get-product-version.outputs.product-version}} + + steps: + # NOTE: This intentionally _does not_ check out the source code + # for the commit/tag we're building, because by now we should + # have everything we need in the combination of CLI release package + # and e2etest package for this platform. 
(This helps ensure that we're + # really testing the release package and not inadvertently testing a + # fresh build from source.) + - name: "Download e2etest package" + uses: actions/download-artifact@v2 + id: e2etestpkg + with: + name: terraform-e2etest_${{ env.os }}_${{ env.arch }}.zip + path: . + - name: "Download Terraform CLI package" + uses: actions/download-artifact@v2 + id: clipkg + with: + name: terraform_${{env.version}}_${{ env.os }}_${{ env.arch }}.zip + path: . + - name: Extract packages + run: | + unzip "./terraform-e2etest_${{ env.os }}_${{ env.arch }}.zip" + unzip "./terraform_${{env.version}}_${{ env.os }}_${{ env.arch }}.zip" + - name: Run E2E Tests + run: | + TF_ACC=1 ./e2etest -test.v + + e2etest-darwin: + name: e2etest for darwin_${{ matrix.goarch }} + runs-on: macos-latest + needs: + - get-product-version + - build-darwin + - e2etest-build + + strategy: + matrix: + include: + - {goarch: "amd64"} + #- {goarch: "arm64"} + fail-fast: false + + env: + os: darwin + arch: ${{ matrix.goarch }} + version: ${{needs.get-product-version.outputs.product-version}} + + steps: + # NOTE: This intentionally _does not_ check out the source code + # for the commit/tag we're building, because by now we should + # have everything we need in the combination of CLI release package + # and e2etest package for this platform. (This helps ensure that we're + # really testing the release package and not inadvertently testing a + # fresh build from source.) + - name: "Download e2etest package" + uses: actions/download-artifact@v2 + id: e2etestpkg + with: + name: terraform-e2etest_${{ env.os }}_${{ env.arch }}.zip + path: . + - name: "Download Terraform CLI package" + uses: actions/download-artifact@v2 + id: clipkg + with: + name: terraform_${{env.version}}_${{ env.os }}_${{ env.arch }}.zip + path: . 
+ - name: Extract packages + run: | + unzip "./terraform-e2etest_${{ env.os }}_${{ env.arch }}.zip" + unzip "./terraform_${{env.version}}_${{ env.os }}_${{ env.arch }}.zip" + - name: Run E2E Tests + run: | + TF_ACC=1 ./e2etest -test.v + + e2etest-windows: + name: e2etest for windows_${{ matrix.goarch }} + runs-on: windows-latest + needs: + - get-product-version + - build + - e2etest-build + + strategy: + matrix: + include: + - {goarch: "amd64"} + fail-fast: false + + env: + os: windows + arch: ${{ matrix.goarch }} + version: ${{needs.get-product-version.outputs.product-version}} + + steps: + # NOTE: This intentionally _does not_ check out the source code + # for the commit/tag we're building, because by now we should + # have everything we need in the combination of CLI release package + # and e2etest package for this platform. (This helps ensure that we're + # really testing the release package and not inadvertently testing a + # fresh build from source.) + - name: "Download e2etest package" + uses: actions/download-artifact@v2 + id: e2etestpkg + with: + name: terraform-e2etest_${{ env.os }}_${{ env.arch }}.zip + path: . + - name: "Download Terraform CLI package" + uses: actions/download-artifact@v2 + id: clipkg + with: + name: terraform_${{env.version}}_${{ env.os }}_${{ env.arch }}.zip + path: . + - name: Extract packages + shell: pwsh + run: | + Expand-Archive -LiteralPath 'terraform-e2etest_${{ env.os }}_${{ env.arch }}.zip' -DestinationPath '.' + Expand-Archive -LiteralPath 'terraform_${{env.version}}_${{ env.os }}_${{ env.arch }}.zip' -DestinationPath '.' 
+ - name: Run E2E Tests + env: + TF_ACC: 1 + shell: cmd + run: | + e2etest.exe -test.v diff --git a/internal/command/e2etest/main_test.go b/internal/command/e2etest/main_test.go index 01e20a982..3c9ba5a5e 100644 --- a/internal/command/e2etest/main_test.go +++ b/internal/command/e2etest/main_test.go @@ -11,6 +11,18 @@ import ( var terraformBin string +// canRunGoBuild is a short-term compromise to account for the fact that we +// have a small number of tests that work by building helper programs using +// "go build" at runtime, but we can't do that in our isolated test mode +// driven by the make-archive.sh script. +// +// FIXME: Rework this a bit so that we build the necessary helper programs +// (test plugins, etc) as part of the initial suite setup, and in the +// make-archive.sh script, so that we can run all of the tests in both +// situations with the tests just using the executable already built for +// them, as we do for terraformBin. +var canRunGoBuild bool + func TestMain(m *testing.M) { teardown := setup() code := m.Run() @@ -21,10 +33,10 @@ func TestMain(m *testing.M) { func setup() func() { if terraformBin != "" { // this is pre-set when we're running in a binary produced from - // the make-archive.sh script, since that builds a ready-to-go - // binary into the archive. However, we do need to turn it into - // an absolute path so that we can find it when we change the - // working directory during tests. + // the make-archive.sh script, since that is for testing an + // executable obtained from a real release package. However, we do + // need to turn it into an absolute path so that we can find it + // when we change the working directory during tests. 
var err error terraformBin, err = filepath.Abs(terraformBin) if err != nil { @@ -38,6 +50,11 @@ func setup() func() { // Make the executable available for use in tests terraformBin = tmpFilename + // Tests running in the ad-hoc testing mode are allowed to use "go build" + // and similar to produce other test executables. + // (See the comment on this variable's declaration for more information.) + canRunGoBuild = true + return func() { os.Remove(tmpFilename) } diff --git a/internal/command/e2etest/make-archive.sh b/internal/command/e2etest/make-archive.sh index 8fabe2f7a..040633b5d 100755 --- a/internal/command/e2etest/make-archive.sh +++ b/internal/command/e2etest/make-archive.sh @@ -13,9 +13,12 @@ # and then executed as follows: # set TF_ACC=1 # ./e2etest.exe -# Since the test archive includes both the test fixtures and the compiled -# terraform executable along with this test program, the result is -# self-contained and does not require a local Go compiler on the target system. +# +# Because separated e2etest harnesses are intended for testing against "real" +# release executables, the generated archives don't include a copy of +# the Terraform executable. Instead, the caller of the tests must retrieve +# and extract a release package into the working directory before running +# the e2etest executable, so that "e2etest" can find and execute it. set +euo pipefail @@ -33,10 +36,6 @@ mkdir -p "$OUTDIR" # We need the test fixtures available when we run the tests. cp -r testdata "$OUTDIR/testdata" -# Bundle a copy of our binary so the target system doesn't need the go -# compiler installed. 
-go build -o "$OUTDIR/terraform$GOEXE" github.com/hashicorp/terraform - # Build the test program go test -o "$OUTDIR/e2etest$GOEXE" -c -ldflags "-X github.com/hashicorp/terraform/internal/command/e2etest.terraformBin=./terraform$GOEXE" github.com/hashicorp/terraform/internal/command/e2etest diff --git a/internal/command/e2etest/primary_test.go b/internal/command/e2etest/primary_test.go index 4081d2d4f..de4ad95b2 100644 --- a/internal/command/e2etest/primary_test.go +++ b/internal/command/e2etest/primary_test.go @@ -204,13 +204,13 @@ func TestPrimaryChdirOption(t *testing.T) { } gotOutput := state.RootModule().OutputValues["cwd"] - wantOutputValue := cty.StringVal(tf.Path()) // path.cwd returns the original path, because path.root is how we get the overridden path + wantOutputValue := cty.StringVal(filepath.ToSlash(tf.Path())) // path.cwd returns the original path, because path.root is how we get the overridden path if gotOutput == nil || !wantOutputValue.RawEquals(gotOutput.Value) { t.Errorf("incorrect value for cwd output\ngot: %#v\nwant Value: %#v", gotOutput, wantOutputValue) } gotOutput = state.RootModule().OutputValues["root"] - wantOutputValue = cty.StringVal(tf.Path("subdir")) // path.root is a relative path, but the text fixture uses abspath on it. + wantOutputValue = cty.StringVal(filepath.ToSlash(tf.Path("subdir"))) // path.root is a relative path, but the text fixture uses abspath on it. if gotOutput == nil || !wantOutputValue.RawEquals(gotOutput.Value) { t.Errorf("incorrect value for root output\ngot: %#v\nwant Value: %#v", gotOutput, wantOutputValue) } diff --git a/internal/command/e2etest/provider_dev_test.go b/internal/command/e2etest/provider_dev_test.go index 8c52c2909..1aac10bcd 100644 --- a/internal/command/e2etest/provider_dev_test.go +++ b/internal/command/e2etest/provider_dev_test.go @@ -18,6 +18,14 @@ import ( // we normally do, so they can just overwrite the same local executable // in-place to iterate faster. 
func TestProviderDevOverrides(t *testing.T) { + if !canRunGoBuild { + // We're running in a separate-build-then-run context, so we can't + // currently execute this test which depends on being able to build + // new executable at runtime. + // + // (See the comment on canRunGoBuild's declaration for more information.) + t.Skip("can't run without building a new provider executable") + } t.Parallel() tf := e2e.NewBinary(terraformBin, "testdata/provider-dev-override") diff --git a/internal/command/e2etest/provider_plugin_test.go b/internal/command/e2etest/provider_plugin_test.go index 585818b1b..c8ac7fe3f 100644 --- a/internal/command/e2etest/provider_plugin_test.go +++ b/internal/command/e2etest/provider_plugin_test.go @@ -13,6 +13,14 @@ import ( // TestProviderProtocols verifies that Terraform can execute provider plugins // with both supported protocol versions. func TestProviderProtocols(t *testing.T) { + if !canRunGoBuild { + // We're running in a separate-build-then-run context, so we can't + // currently execute this test which depends on being able to build + // new executable at runtime. + // + // (See the comment on canRunGoBuild's declaration for more information.) 
+ t.Skip("can't run without building a new provider executable") + } t.Parallel() tf := e2e.NewBinary(terraformBin, "testdata/provider-plugin") diff --git a/internal/command/e2etest/providers_tamper_test.go b/internal/command/e2etest/providers_tamper_test.go index 03026354a..0c285c1f2 100644 --- a/internal/command/e2etest/providers_tamper_test.go +++ b/internal/command/e2etest/providers_tamper_test.go @@ -41,12 +41,16 @@ func TestProviderTampering(t *testing.T) { seedDir := tf.WorkDir() const providerVersion = "3.1.0" // must match the version in the fixture config - pluginDir := ".terraform/providers/registry.terraform.io/hashicorp/null/" + providerVersion + "/" + getproviders.CurrentPlatform.String() - pluginExe := pluginDir + "/terraform-provider-null_v" + providerVersion + "_x5" + pluginDir := filepath.Join(".terraform", "providers", "registry.terraform.io", "hashicorp", "null", providerVersion, getproviders.CurrentPlatform.String()) + pluginExe := filepath.Join(pluginDir, "terraform-provider-null_v"+providerVersion+"_x5") if getproviders.CurrentPlatform.OS == "windows" { pluginExe += ".exe" // ugh } + // filepath.Join here to make sure we get the right path separator + // for whatever OS we're running these tests on. 
+ providerCacheDir := filepath.Join(".terraform", "providers") + t.Run("cache dir totally gone", func(t *testing.T) { tf := e2e.NewBinary(terraformBin, seedDir) defer tf.Close() @@ -61,7 +65,7 @@ func TestProviderTampering(t *testing.T) { if err == nil { t.Fatalf("unexpected plan success\nstdout:\n%s", stdout) } - if want := `registry.terraform.io/hashicorp/null: there is no package for registry.terraform.io/hashicorp/null 3.1.0 cached in .terraform/providers`; !strings.Contains(stderr, want) { + if want := `registry.terraform.io/hashicorp/null: there is no package for registry.terraform.io/hashicorp/null 3.1.0 cached in ` + providerCacheDir; !strings.Contains(stderr, want) { t.Errorf("missing expected error message\nwant substring: %s\ngot:\n%s", want, stderr) } if want := `terraform init`; !strings.Contains(stderr, want) { @@ -128,7 +132,7 @@ func TestProviderTampering(t *testing.T) { if err == nil { t.Fatalf("unexpected plan success\nstdout:\n%s", stdout) } - if want := `registry.terraform.io/hashicorp/null: the cached package for registry.terraform.io/hashicorp/null 3.1.0 (in .terraform/providers) does not match any of the checksums recorded in the dependency lock file`; !strings.Contains(stderr, want) { + if want := `registry.terraform.io/hashicorp/null: the cached package for registry.terraform.io/hashicorp/null 3.1.0 (in ` + providerCacheDir + `) does not match any of the checksums recorded in the dependency lock file`; !strings.Contains(stderr, want) { t.Errorf("missing expected error message\nwant substring: %s\ngot:\n%s", want, stderr) } if want := `terraform init`; !strings.Contains(stderr, want) { @@ -237,7 +241,7 @@ func TestProviderTampering(t *testing.T) { if err == nil { t.Fatalf("unexpected apply success\nstdout:\n%s", stdout) } - if want := `registry.terraform.io/hashicorp/null: there is no package for registry.terraform.io/hashicorp/null 3.1.0 cached in .terraform/providers`; !strings.Contains(stderr, want) { + if want := 
`registry.terraform.io/hashicorp/null: there is no package for registry.terraform.io/hashicorp/null 3.1.0 cached in ` + providerCacheDir; !strings.Contains(stderr, want) { t.Errorf("missing expected error message\nwant substring: %s\ngot:\n%s", want, stderr) } }) @@ -260,7 +264,7 @@ func TestProviderTampering(t *testing.T) { if err == nil { t.Fatalf("unexpected apply success\nstdout:\n%s", stdout) } - if want := `registry.terraform.io/hashicorp/null: the cached package for registry.terraform.io/hashicorp/null 3.1.0 (in .terraform/providers) does not match any of the checksums recorded in the dependency lock file`; !strings.Contains(stderr, want) { + if want := `registry.terraform.io/hashicorp/null: the cached package for registry.terraform.io/hashicorp/null 3.1.0 (in ` + providerCacheDir + `) does not match any of the checksums recorded in the dependency lock file`; !strings.Contains(stderr, want) { t.Errorf("missing expected error message\nwant substring: %s\ngot:\n%s", want, stderr) } }) diff --git a/internal/command/e2etest/provisioner_plugin_test.go b/internal/command/e2etest/provisioner_plugin_test.go index 4220df574..4ee75f9ce 100644 --- a/internal/command/e2etest/provisioner_plugin_test.go +++ b/internal/command/e2etest/provisioner_plugin_test.go @@ -12,6 +12,14 @@ import ( // TestProvisionerPlugin is a test that terraform can execute a 3rd party // provisioner plugin. func TestProvisionerPlugin(t *testing.T) { + if !canRunGoBuild { + // We're running in a separate-build-then-run context, so we can't + // currently execute this test which depends on being able to build + // new executable at runtime. + // + // (See the comment on canRunGoBuild's declaration for more information.) 
+ t.Skip("can't run without building a new provisioner executable") + } t.Parallel() // This test reaches out to releases.hashicorp.com to download the From 218e55b23cf19cb55183617ee68b86d6cc363c14 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 3 Jan 2022 10:03:00 -0800 Subject: [PATCH 27/68] build: Build docs source package as part of the release pipeline This should eventually grow to be a step that actually verifies the validity of the docs source prior to publishing the artifact that a downstream publishing pipeline can consume, but for the moment it's really just a placeholder since we have no such validation step and no downstream pipeline consuming this artifact. The general idea here is that the artifacts from this workflow should be sufficient for all downstream release steps to occur without any direct access to the Terraform CLI repository, and so this is intended to eventually meet that ideal but as of this commit the website docs publishing step _does_ still depend on direct access to this repository. --- .github/workflows/build.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ee7dc5369..dd28e27da 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -410,3 +410,26 @@ jobs: shell: cmd run: | e2etest.exe -test.v + + docs-source-package: + name: "Build documentation bundle" + runs-on: ubuntu-latest + needs: + - get-product-version + + env: + version: ${{needs.get-product-version.outputs.product-version}} + + steps: + - uses: actions/checkout@v2 + # FIXME: We should include some sort of pre-validation step here, to + # confirm that the doc content is mechanically valid so that the + # publishing pipeline will be able to render all content without errors. + - name: "Create documentation source bundle" + run: | + (cd website && zip -9 -r ../terraform-cli-docs-source_${{ env.version }}.zip .) 
+ - uses: actions/upload-artifact@v2 + with: + name: terraform-cli-docs-source_${{ env.version }}.zip + path: terraform-cli-docs-source_${{ env.version }}.zip + if-no-files-found: error From 3bf758eaf5e0b381a902bdda82a4e583d7844cb8 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 3 Jan 2022 10:27:42 -0800 Subject: [PATCH 28/68] build: Consolidate build-darwin job into just "build" We can use an extra matrix dimension to select which execution environment we'll use for each GOOS/GOARCH pair, and thus avoid duplicating the job definition for darwin just to set runs-on: macos-latest for it. This is not really an intended use of a matrix dimension because it's directly related to the existing "goos" one, rather than being an independent third dimension, but it doesn't matter in practice because we're using the "include" option to specify exact combinations, and thus we're not relying on the built-in functionality to generate all possible matrix combinations. --- .github/workflows/build.yml | 89 +++++++++++++++---------------------- 1 file changed, 35 insertions(+), 54 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index dd28e27da..825a6a1da 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -62,10 +62,11 @@ jobs: generate-metadata-file: name: "Generate release metadata" - needs: get-product-version runs-on: ubuntu-latest + needs: get-product-version outputs: filepath: ${{ steps.generate-metadata-file.outputs.filepath }} + steps: - uses: actions/checkout@v2 - name: Generate package metadata @@ -81,27 +82,31 @@ jobs: path: ${{ steps.generate-metadata-file.outputs.filepath }} build: - needs: ["get-product-version", "get-go-version"] - runs-on: ubuntu-latest + name: Build for ${{ matrix.goos }}_${{ matrix.goarch }} + runs-on: ${{ matrix.runson }} + needs: + - get-product-version + - get-go-version + strategy: matrix: include: - - {goos: "freebsd", goarch: "386"} - - {goos: "freebsd", goarch: "amd64"} - 
- {goos: "freebsd", goarch: "arm"} - - {goos: "linux", goarch: "386"} - - {goos: "linux", goarch: "amd64"} - - {goos: "linux", goarch: "arm"} - - {goos: "linux", goarch: "arm64"} - - {goos: "openbsd", goarch: "386"} - - {goos: "openbsd", goarch: "amd64"} - - {goos: "solaris", goarch: "amd64"} - - {goos: "windows", goarch: "386"} - - {goos: "windows", goarch: "amd64"} + - {goos: "freebsd", goarch: "386", runson: "ubuntu-latest"} + - {goos: "freebsd", goarch: "amd64", runson: "ubuntu-latest"} + - {goos: "freebsd", goarch: "arm", runson: "ubuntu-latest"} + - {goos: "linux", goarch: "386", runson: "ubuntu-latest"} + - {goos: "linux", goarch: "amd64", runson: "ubuntu-latest"} + - {goos: "linux", goarch: "arm", runson: "ubuntu-latest"} + - {goos: "linux", goarch: "arm64", runson: "ubuntu-latest"} + - {goos: "openbsd", goarch: "386", runson: "ubuntu-latest"} + - {goos: "openbsd", goarch: "amd64", runson: "ubuntu-latest"} + - {goos: "solaris", goarch: "amd64", runson: "ubuntu-latest"} + - {goos: "windows", goarch: "386", runson: "ubuntu-latest"} + - {goos: "windows", goarch: "amd64", runson: "ubuntu-latest"} + - {goos: "darwin", goarch: "amd64", runson: "macos-latest"} + - {goos: "darwin", goarch: "arm64", runson: "macos-latest"} fail-fast: false - name: Build for ${{ matrix.goos }}_${{ matrix.goarch }} - steps: - uses: actions/checkout@v2 @@ -110,13 +115,22 @@ jobs: with: go-version: ${{ needs.get-go-version.outputs.go-version }} + # FIXME: We're not currently setting the hard-coded version string in + # version/version.go at any point here, which means that the packages + # this process builds are not suitable for release. Once we're using + # Go 1.18 we may begin using the version information automatically + # embedded by the Go toolchain, at which point we won't need any + # special steps during build, but failing that we'll need to rework + # the version/version.go package so we can more readily update it + # using linker flags rather than direct code modification. 
+ - name: Build env: GOOS: ${{ matrix.goos }} GOARCH: ${{ matrix.goarch }} run: | mkdir dist out - go build -o dist/ . + go build -ldflags "-w -s" -o dist/ . zip -r -j out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip dist/ - uses: actions/upload-artifact@v2 @@ -145,6 +159,8 @@ jobs: echo "RPM_PACKAGE=$(basename out/*.rpm)" >> $GITHUB_ENV echo "DEB_PACKAGE=$(basename out/*.deb)" >> $GITHUB_ENV + # FIXME: Generate homebrew packages when targeting macOS. + - uses: actions/upload-artifact@v2 if: ${{ matrix.goos == 'linux' }} with: @@ -157,41 +173,6 @@ jobs: name: ${{ env.DEB_PACKAGE }} path: out/${{ env.DEB_PACKAGE }} - build-darwin: - needs: ["get-product-version", "get-go-version"] - runs-on: macos-latest - strategy: - matrix: - include: - - {goos: "darwin", goarch: "amd64"} - - {goos: "darwin", goarch: "arm64"} - fail-fast: false - - name: Build for ${{ matrix.goos }}_${{ matrix.goarch }} - - env: - GOOS: ${{ matrix.goos }} - GOARCH: ${{ matrix.goarch }} - - steps: - - uses: actions/checkout@v2 - - - name: Install Go toolchain - uses: actions/setup-go@v2 - with: - go-version: ${{ needs.get-go-version.outputs.go-version }} - - - name: Build - run: | - mkdir dist out - go build -o dist/ - zip -r -j out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip dist/ - - - uses: actions/upload-artifact@v2 - with: - name: ${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip - path: out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip - build-docker: name: Build Docker image for linux_${{ matrix.arch }} needs: @@ -319,7 +300,7 @@ jobs: runs-on: macos-latest needs: - get-product-version - - build-darwin + - build - e2etest-build strategy: From c1699ea80c93ba971b810496683aec3cf17a72f2 Mon Sep 17 
00:00:00 2001 From: Martin Atkins Date: Mon, 3 Jan 2022 10:45:08 -0800 Subject: [PATCH 29/68] build: Constrain permissions for the "build" workflow steps This workflow only generates artifacts and doesn't need to modify anything about the repository. --- .github/workflows/build.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 825a6a1da..ff06462ea 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -18,6 +18,10 @@ on: env: PKG_NAME: "terraform" +permissions: + contents: read + statuses: write + jobs: get-product-version: name: "Determine intended Terraform version" From 28a6036cf269ab62f0f33d9bb7a167e367aab54a Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 3 Jan 2022 11:20:24 -0800 Subject: [PATCH 30/68] build: Separate Linux distro package builds into separate job In our build workflow we'll treat Linux distribution packaging (currently .deb and .rpm packages) as a separate job, instead of embedding it into the "build" job, so that this step can happen concurrently with the other derived actions like the docker image build, and the e2etest runs. --- .github/workflows/build.yml | 95 ++++++++++++++++++++++++++++--------- 1 file changed, 73 insertions(+), 22 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ff06462ea..c0fd888b3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,6 +5,20 @@ name: Build Terraform CLI Packages # which is a special prefix that triggers this workflow even though it's not # actually a release branch. +# NOTE: This workflow is currently used only to verify that all commits to a +# release branch are buildable. It's set up to generate some artifacts that +# might in principle be consumed by a downstream release process, but currently +# they are not used in this way and official Terraform CLI releases are instead +# built using a separate process maintained elsewhere. 
We intend to adopt this +# new process fully later, once other HashiCorp-internal tooling is ready. +# +# Currently this process produces what should be working packages but packages +# NOT suitable for distribution to end-users as official releases, because it +# doesn't include a step to ensure that "terraform version" (and similar) will +# report the intended version number. Consequently we can safely use these +# results for testing purposes, but not yet for release purposes. See the +# "build" job below for a FIXME comment related to version numbers. + on: workflow_dispatch: push: @@ -28,6 +42,7 @@ jobs: runs-on: ubuntu-latest outputs: product-version: ${{ steps.get-product-version.outputs.product-version }} + steps: - uses: actions/checkout@v2 with: @@ -54,6 +69,7 @@ jobs: runs-on: ubuntu-latest outputs: go-version: ${{ steps.get-go-version.outputs.go-version }} + steps: - uses: actions/checkout@v2 - name: Determine Go version @@ -91,7 +107,6 @@ jobs: needs: - get-product-version - get-go-version - strategy: matrix: include: @@ -142,51 +157,90 @@ jobs: name: ${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip path: out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip - - name: Linux distribution packages - if: ${{ matrix.goos == 'linux' }} + package-linux: + name: "Build Linux distro packages for ${{ matrix.arch }}" + runs-on: ubuntu-latest + needs: + - get-product-version + - build + strategy: + matrix: + include: + - {arch: "386"} + - {arch: "amd64"} + - {arch: "arm"} + - {arch: "arm64"} + fail-fast: false + + env: + os: linux + arch: ${{matrix.arch}} + version: ${{needs.get-product-version.outputs.product-version}} + + steps: + - name: "Download Terraform CLI package" + uses: actions/download-artifact@v2 + id: clipkg + with: + name: terraform_${{ env.version }}_${{ env.os }}_${{ env.arch }}.zip + path: . 
+ - name: Extract packages + run: | + mkdir -p dist + (cd dist && unzip "../terraform_${{ env.version }}_${{ env.os }}_${{ env.arch }}.zip") + mkdir -p out + - name: Build Linux distribution packages uses: hashicorp/package@v1 with: name: "terraform" description: "Terraform enables you to safely and predictably create, change, and improve infrastructure. It is an open source tool that codifies APIs into declarative configuration files that can be shared amongst team members, treated as code, edited, reviewed, and versioned." - arch: ${{ matrix.goarch }} - version: ${{ needs.get-product-version.outputs.product-version }} + arch: ${{ matrix.arch }} + version: ${{ env.version }} maintainer: "HashiCorp" homepage: "https://terraform.io/" license: "MPL-2.0" - binary: "dist/${{ env.PKG_NAME }}" + binary: "dist/terraform" deb_depends: "git" rpm_depends: "git" - - name: Gather Linux distribution package filenames - if: ${{ matrix.goos == 'linux' }} run: | echo "RPM_PACKAGE=$(basename out/*.rpm)" >> $GITHUB_ENV echo "DEB_PACKAGE=$(basename out/*.deb)" >> $GITHUB_ENV - - # FIXME: Generate homebrew packages when targeting macOS. - - - uses: actions/upload-artifact@v2 - if: ${{ matrix.goos == 'linux' }} + - name: "Save .rpm package" + uses: actions/upload-artifact@v2 with: name: ${{ env.RPM_PACKAGE }} path: out/${{ env.RPM_PACKAGE }} - - - uses: actions/upload-artifact@v2 - if: ${{ matrix.goos == 'linux' }} + - name: "Save .deb package" + uses: actions/upload-artifact@v2 with: name: ${{ env.DEB_PACKAGE }} path: out/${{ env.DEB_PACKAGE }} - build-docker: + # TODO: homebrew packages for macOS + #package-homebrew: + # name: Build Homebrew package for darwin_${{ matrix.arch }} + # runs-on: macos-latest + # needs: + # - get-product-version + # - build + # strategy: + # matrix: + # arch: ["amd64", "arm64"] + # fail-fast: false + # ... 
+ + package-docker: name: Build Docker image for linux_${{ matrix.arch }} + runs-on: ubuntu-latest needs: - get-product-version - build - runs-on: ubuntu-latest strategy: matrix: arch: ["amd64"] fail-fast: false + env: repo: ${{github.event.repository.name}} version: ${{needs.get-product-version.outputs.product-version}} @@ -206,8 +260,8 @@ jobs: e2etest-build: name: Build e2etest for ${{ matrix.goos }}_${{ matrix.goarch }} - needs: ["get-go-version"] runs-on: ubuntu-latest + needs: ["get-go-version"] strategy: matrix: # We build test harnesses only for the v1.0 Compatibility Promises @@ -258,7 +312,6 @@ jobs: - get-product-version - build - e2etest-build - strategy: matrix: include: @@ -306,7 +359,6 @@ jobs: - get-product-version - build - e2etest-build - strategy: matrix: include: @@ -353,7 +405,6 @@ jobs: - get-product-version - build - e2etest-build - strategy: matrix: include: From 05d0febf7f23ef0846c336de7bfdf0236cf93576 Mon Sep 17 00:00:00 2001 From: Nick Fagerlund Date: Wed, 5 Jan 2022 14:38:53 -0800 Subject: [PATCH 31/68] Relax test to focus on the behavior we care about (encoded == encoded) The specific output order is meaningless, but it should always be the same after two Encode() calls with identical (ignoring in-memory order) dependency sets. --- internal/states/instance_object_test.go | 27 ++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/internal/states/instance_object_test.go b/internal/states/instance_object_test.go index f8be9743e..8f961330d 100644 --- a/internal/states/instance_object_test.go +++ b/internal/states/instance_object_test.go @@ -12,26 +12,39 @@ func TestResourceInstanceObject_encode(t *testing.T) { value := cty.ObjectVal(map[string]cty.Value{ "foo": cty.True, }) - deps := []addrs.ConfigResource{ + // The in-memory order of resource dependencies is random, since they're an + // unordered set. 
+ depsOne := []addrs.ConfigResource{ addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "honk"), addrs.RootModule.Child("child").Resource(addrs.ManagedResourceMode, "test", "flub"), addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "boop"), } - wantDeps := []addrs.ConfigResource{ + depsTwo := []addrs.ConfigResource{ addrs.RootModule.Child("child").Resource(addrs.ManagedResourceMode, "test", "flub"), addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "boop"), addrs.RootModule.Resource(addrs.ManagedResourceMode, "test", "honk"), } - rio := &ResourceInstanceObject{ + rioOne := &ResourceInstanceObject{ Value: value, Status: ObjectPlanned, - Dependencies: deps, + Dependencies: depsOne, } - rios, err := rio.Encode(value.Type(), 0) + rioTwo := &ResourceInstanceObject{ + Value: value, + Status: ObjectPlanned, + Dependencies: depsTwo, + } + riosOne, err := rioOne.Encode(value.Type(), 0) if err != nil { t.Fatalf("unexpected error: %s", err) } - if diff := cmp.Diff(wantDeps, rios.Dependencies); diff != "" { - t.Errorf("wrong result for deps\n%s", diff) + riosTwo, err := rioTwo.Encode(value.Type(), 0) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + // However, identical sets of dependencies should always be written to state + // in an identical order, so we don't do meaningless state updates on refresh. 
+ if diff := cmp.Diff(riosOne.Dependencies, riosTwo.Dependencies); diff != "" { + t.Errorf("identical dependencies got encoded in different orders:\n%s", diff) } } From 087c2f06ee52e41f7d7d8d2c41f673df88b8d4d4 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Wed, 5 Jan 2022 12:27:09 -0800 Subject: [PATCH 32/68] website: Documentation of how provisioners upload files We recently made a change to how provisioners upload files in order to address an unintended remote code execution vector when using SSH, which revealed that we had not previously documented well enough the expected contract for how provisioners upload files to remote systems, and so some users were depending on unintended consequences of the bug now fixed. We are retaining the fix on security-related grounds, but this is a good prompt to be clearer in the docs about what exactly Terraform is doing when asked to upload files over SSH and WinRM, so users can understand what is supported and write their configurations accordingly. This also includes an additional section to the v1.1 upgrade guide, since we apparently neglected to document this intentional breaking change in the first draft of that page. Of course, provisioners as a whole remain a last resort, and so we're documenting this as hopefully a helpful aid to those who have no other option, and not meaning in any way to recommend their use for any new use-cases.
--- .../resources/provisioners/connection.mdx | 87 +++++++++++++++++- .../language/resources/provisioners/file.mdx | 92 +++++++++++++------ website/docs/language/upgrade-guides/1-1.mdx | 61 ++++++++++++ 3 files changed, 209 insertions(+), 31 deletions(-) diff --git a/website/docs/language/resources/provisioners/connection.mdx b/website/docs/language/resources/provisioners/connection.mdx index 704012199..eb1ccff5f 100644 --- a/website/docs/language/resources/provisioners/connection.mdx +++ b/website/docs/language/resources/provisioners/connection.mdx @@ -102,6 +102,9 @@ block would create a dependency cycle. Defaults to 5 minutes. * `script_path` - The path used to copy scripts meant for remote execution. + For more information, see + [How Provisioners Execute Remote Scripts](#how-provisioners-execute-remote-scripts) + below. **Additional arguments only supported by the `ssh` connection type:** @@ -123,9 +126,7 @@ block would create a dependency cycle. * `host_key` - The public key from the remote host or the signing CA, used to verify the connection. -* `target_platform` - The target platform to connect to. Valid values are `windows` and `unix`. Defaults to `unix` if not set. - - If the platform is set to `windows`, the default `script_path` is `c:\windows\temp\terraform_%RAND%.cmd`, assuming [the SSH default shell](https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_server_configuration#configuring-the-default-shell-for-openssh-in-windows) is `cmd.exe`. If the SSH default shell is PowerShell, set `script_path` to `"c:/windows/temp/terraform_%RAND%.ps1"` +* `target_platform` - The target platform to connect to. Valid values are `"windows"` and `"unix"`. Defaults to `"unix"` if not set. **Additional arguments only supported by the `winrm` connection type:** @@ -137,6 +138,11 @@ block would create a dependency cycle. * `cacert` - The CA certificate to validate against. 
+Provisioners typically assume that the remote system runs Microsoft Windows +when using the `winrm` connection type. Behaviors which would vary based on +the `target_platform` option if using SSH will instead force the +Windows-specific behavior when using WinRM, unless otherwise specified. + ## Connecting through a Bastion Host with SSH @@ -167,3 +173,78 @@ The `ssh` connection also supports the following fields to facilitate connnectio * `bastion_certificate` - The contents of a signed CA Certificate. The certificate argument must be used in conjunction with a `bastion_private_key`. These can be loaded from a file on disk using the [the `file` function](/language/functions/file). + +## How Provisioners Execute Remote Scripts + +Provisioners which execute commands on a remote system via a protocol such as +SSH typically achieve that by uploading a script file to the remote system +and then asking the default shell to execute it. Provisioners use this strategy +because it then allows you to use all of the typical scripting techniques +supported by that shell, including preserving environment variable values +and other context between script statements. + +However, this approach does have some consequences which can be relevant in +some unusual situations, even though this is just an implementation detail +for typical use. + +Most importantly, there must be a suitable location in the remote filesystem +where the provisioner can create the script file. By default, Terraform +chooses a path containing a random number using the following patterns +depending on how `target_platform` is set: + +* `"unix"`: `/tmp/terraform_%RAND%.sh` +* `"windows"`: `C:/windows/temp/terraform_%RAND%.cmd` + +In both cases above, the provisioner replaces the sequence `%RAND%` with +some randomly-chosen decimal digits. 
+ +Provisioners cannot react directly to remote environment variables such as +`TMPDIR` or use functions like `mktemp` because they run on the system where +Terraform is running, not on the remote system. Therefore if your remote +system doesn't use the filesystem layout expected by these default paths +then you can override it using the `script_path` option in your `connection` +block: + +```hcl +connection { + # ... + script_path = "H:/terraform-temp/script_%RAND%.sh" +} +``` + +As with the default patterns, provisioners will replace the sequence `%RAND%` +with randomly-selected decimal digits, to reduce the likelihood of collisions +between multiple provisioners running concurrently. + +If your target system is running Windows, we recommend using forward slashes +instead of backslashes, despite the typical convention on Windows, because +the Terraform language uses backslash as the quoted string escape character. + +### Executing Scripts using SSH/SCP + +When using the SSH protocol, provisioners upload their script files using +the Secure Copy Protocol (SCP), which requires that the remote system have +the `scp` service program installed to act as the server for that protocol. + +Provisioners will pass the chosen script path (after `%RAND%` +expansion) directly to the remote `scp` process, which is responsible for +interpreting it. With the default configuration of `scp` as distributed with +OpenSSH, you can place temporary scripts in the home directory of the remote +user by specifying a relative path: + +```hcl +connection { + type = "ssh" + # ... + script_path = "terraform_provisioner_%RAND%.sh" +} +``` + +-> **Warning:** In Terraform v1.0 and earlier, the built-in provisioners +incorrectly passed the `script_path` value to `scp` through a remote shell and +thus allowed it to be subject to arbitrary shell expansion, and thus created an +unintended opportunity for remote code execution.
Terraform v1.1 and later +will now correctly quote and escape the script path to ensure that the +remote `scp` process can always interpret it literally. For modules that will +be used with Terraform v1.0 and earlier, avoid using untrusted external +values as part of the `script_path` argument. diff --git a/website/docs/language/resources/provisioners/file.mdx b/website/docs/language/resources/provisioners/file.mdx index 06b22758b..83dfb7a90 100644 --- a/website/docs/language/resources/provisioners/file.mdx +++ b/website/docs/language/resources/provisioners/file.mdx @@ -52,41 +52,77 @@ resource "aws_instance" "web" { The following arguments are supported: -* `source` - This is the source file or folder. It can be specified as - relative to the current working directory or as an absolute path. This - attribute cannot be specified with `content`. +* `source` - The source file or directory. Specify it either relative to the + current working directory or as an absolute path. + This argument cannot be combined with `content`. -* `content` - This is the content to copy on the destination. If destination is a file, - the content will be written on that file, in case of a directory a file named - `tf-file-content` is created. It's recommended to use a file as the destination. A - [`template_file`](https://registry.terraform.io/providers/hashicorp/template/latest/docs/data-sources/file) might be referenced in here, or - any interpolation syntax. This attribute cannot be specified with `source`. +* `content` - The direct content to copy on the destination. + If destination is a file, the content will be written on that file. In case + of a directory, a file named `tf-file-content` is created inside that + directory. We recommend using a file as the destination when using `content`. + This argument cannot be combined with `source`. -* `destination` - (Required) This is the destination path. It must be specified as an - absolute path. 
+* `destination` - (Required) The destination path to write to on the remote + system. See [Destination Paths](#destination-paths) below for more + information. + +## Destination Paths + +The path you provide in the `destination` argument will be evaluated by the +remote system, rather than by Terraform itself. Therefore the valid values +for that argument can vary depending on the operating system and remote access +software running on the target. + +When connecting over SSH, the `file` provisioner passes the given destination +path verbatim to the `scp` program on the remote host. By default, OpenSSH's +`scp` implementation runs in the remote user's home directory and so you can +specify a relative path to upload into that home directory, or an absolute +path to upload to some other location. The remote `scp` process will run with +the access level of the user specified in the `connection` block, and so +permissions may prevent writing directly to locations outside of the home +directory. + +Because WinRM has no corresponding file transfer protocol, for WinRM +connections the `file` provisioner uses a more complex process: + +1. Generate a temporary filename in the directory given in the remote system's + `TEMP` environment variable, using a pseudorandom UUID for uniqueness. +2. Use sequential generated `echo` commands over WinRM to gradually append + base64-encoded chunks of the source file to the chosen temporary file. +3. Use an uploaded PowerShell script to read the temporary file, base64-decode, + and write the raw result into the destination file. + +In the WinRM case, the destination path is therefore interpreted by PowerShell +and so you must take care not to use any meta-characters that PowerShell might +interpret. In particular, avoid including any untrusted external input in +your `destination` argument when using WinRM, because it can serve as a vector +for arbitrary PowerShell code execution on the remote system. 
+ +Modern Windows systems support running an OpenSSH server, so we strongly +recommend choosing SSH over WinRM whereever possible, and using WinRM only as +a last resort when working with obsolete Windows versions. ## Directory Uploads -The file provisioner is also able to upload a complete directory to the remote machine. -When uploading a directory, there are a few important things you should know. +The `file` provisioner can upload a complete directory to the remote machine. +When uploading a directory, there are some additional considerations. -First, when using the `ssh` connection type the destination directory must already exist. -If you need to create it, use a remote-exec provisioner just prior to the file provisioner -in order to create the directory. When using the `winrm` connection type the destination -directory will be created for you if it doesn't already exist. +When using the `ssh` connection type the destination directory must already +exist. If you need to create it, use a remote-exec provisioner just prior to +the file provisioner in order to create the directory -Next, the existence of a trailing slash on the source path will determine whether the -directory name will be embedded within the destination, or whether the destination will -be created. An example explains this best: +When using the `winrm` connection type the destination directory will be +created for you if it doesn't already exist. -If the source is `/foo` (no trailing slash), and the destination is `/tmp`, then the contents -of `/foo` on the local machine will be uploaded to `/tmp/foo` on the remote machine. The -`foo` directory on the remote machine will be created by Terraform. +The existence of a trailing slash on the source path will determine whether the +directory name will be embedded within the destination, or whether the +destination will be created. 
For example: -If the source, however, is `/foo/` (a trailing slash is present), and the destination is -`/tmp`, then the contents of `/foo` will be uploaded directly into `/tmp`. +* If the source is `/foo` (no trailing slash), and the destination is `/tmp`, + then the contents of `/foo` on the local machine will be uploaded to + `/tmp/foo` on the remote machine. The `foo` directory on the remote machine + will be created by Terraform. -This behavior was adopted from the standard behavior of -[rsync](https://linux.die.net/man/1/rsync). - --> **Note:** Under the covers, rsync may or may not be used. +* If the source, however, is `/foo/` (a trailing slash is present), and the + destination is `/tmp`, then the contents of `/foo` will be uploaded directly + into `/tmp`. diff --git a/website/docs/language/upgrade-guides/1-1.mdx b/website/docs/language/upgrade-guides/1-1.mdx index faafcf147..aa16a4838 100644 --- a/website/docs/language/upgrade-guides/1-1.mdx +++ b/website/docs/language/upgrade-guides/1-1.mdx @@ -23,6 +23,7 @@ small number of users, described in the following sections. * [Terraform requires macOS 10.13 High Sierra or later](#terraform-requires-macos-1013-high-sierra-or-later) * [Preparation for removing Azure AD Graph support in the AzureRM Backend](#preparation-for-removing-azure-ad-graph-support-in-the-azurerm-backend) +* [Interpretation of remote file paths in the `remote-exec` and `file` provisioners](#interpretation-of-remote-file-paths-in-the-remote-exec-and-file-provisioners) * [Changes to `terraform graph`](#changes-to-terraform-graph) * [Changes to `terraform state mv`](#changes-to-terraform-state-mv) * [Provider checksum verification in `terraform apply`](#provider-checksum-verification-in-terraform-apply) @@ -54,6 +55,66 @@ in the near future to prepare for the final removal of Azure AD Graph support in a later Terraform release. However, no immediate change is required before upgrading to Terraform v1.1. 
+## Interpretation of remote file paths in the `remote-exec` and `file` provisioners + +When using Terraform's built-in `remote-exec` and `file` provisioners, there +are two situations where Terraform internally uses +[Secure Copy Protocol](https://en.wikipedia.org/wiki/Secure_copy_protocol) +(SCP) to upload files to the remote system at a configuration-specified +location: + +* For [the `file` provisioner](/language/resources/provisioners/file), + the primary functionality is to upload a file using SCP, and the + `destination` argument specifies the remote path where the file is to be + written. +* For [the `remote-exec` provisioner](/language/resources/provisioners/remote-exec), + internally the provisioner works by uploading the given scripts to files + on the remote system and then executing them. By default the provisioner + selects a temporary filename automatically, but a module author can + potentially override that location using the `script_path` argument in the + associated [`connection` block](https://www.terraform.io/language/resources/provisioners/connection). + +If you are not using either of the specific arguments mentioned above, no +configuration changes will be required to upgrade to Terraform v1.1. + +These provisioners both passing the specified remote paths to the `scp` service +program on the remote system. In Terraform v1.0 and earlier, the provisioners +were passing the paths to `scp` in a way that was inadvertently subject to +_shell expansion_. That inadvertently allowed for convenient shorthands +such as `~/example` and `$HOME/example` to write into the target user's +home directory, but also offered an undesirable opportunity for accidental +remote code execution, such as `$(arbitrary-program)`. + +In Terraform v1.1 both of the above remote path arguments are passed _verbatim_ +to the remote `scp` service, without any prior shell expansion. 
For that reason, +shell-defined expansion tokens such as `~` and environment variable references +will no longer be evaluated. + +By default, the OpenSSH server and the program `scp` together already interpret +relative paths as relative to the target user's home directory, and so +module authors can specify relative paths without any special metacharacters +in order to request uploading into that default location: + +```hcl + provisioner "file" { + source = "local.txt" + destination = "remote.txt" + } +``` + +If you maintain a module that was depending on expansion of `~/`, `$HOME/`, +`${HOME}`/ or similar, remove that prefix so that your module instead specifies +just a relative path. + +This is an intentional compatibility regression which we accepted after due +consideration of +[the pragmatic exceptions to our compatibility promises](/language/v1-compatibility-promises#pragmatic-exceptions). +Specifically, this behavior offered an unintended and non-obvious avenue for +arbitrary code execution on the remote system if either of the above arguments +were populated from outside input, and an alternative approach is available +which doesn't have that drawback, and this is therefore justified on security +grounds. + ## Changes to `terraform graph` The `terraform graph` command exists to help with debugging and so it From 4ec1feaa3d41a516b87fbf80191203081a8de62e Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Wed, 5 Jan 2022 15:10:05 -0800 Subject: [PATCH 33/68] build: CGO_ENABLED when building for macOS Normally when we cross-compile we do so without CGo, because we don't have suitable C headers available for systems other than the host. However, building for macOS on macOS is special because there are sufficient headers available on darwin_amd64 to build for both darwin_amd64 _and_ darwin_arm64. 
Also, we _must_ use CGo on macOS because the system resolver is only available via darwin's libc, and so building without CGo produces executables that don't resolve hostnames correctly. This is a conditional in bash to avoid having to duplicate the entire step. Perhaps later we'll find a more general version of this which can avoid the special case, but this is sufficient for the moment. --- .github/workflows/build.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c0fd888b3..0aefa68c2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -147,8 +147,17 @@ jobs: env: GOOS: ${{ matrix.goos }} GOARCH: ${{ matrix.goarch }} + ACTIONSOS: ${{ matrix.runson }} run: | mkdir dist out + if [ "$ACTIONSOS" == "macos-latest" ] && [ "$GOOS" == "darwin" ]; then + # When building for macOS _on_ macOS we must force CGo to get + # correct hostname resolution behavior. (This must be conditional + # because other cross-compiles won't have suitable headers + # available to use CGo; darwin_amd64 has suitable headers to + # cross-build for darwin_arm64.) + export CGO_ENABLED=1 + fi go build -ldflags "-w -s" -o dist/ . zip -r -j out/${{ env.PKG_NAME }}_${{ needs.get-product-version.outputs.product-version }}_${{ matrix.goos }}_${{ matrix.goarch }}.zip dist/ From 3ac334f267335d969ca3991d719c4489f7f62657 Mon Sep 17 00:00:00 2001 From: Mukesh Kumar Date: Fri, 7 Jan 2022 05:52:12 +0530 Subject: [PATCH 34/68] Update website/docs/language/values/locals.mdx Thank you. Your suggestion to use "instead of" makes the sentence even more easy to understand. 
Co-authored-by: Laura Pacilio <83350965+laurapacilio@users.noreply.github.com> --- website/docs/language/values/locals.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/language/values/locals.mdx b/website/docs/language/values/locals.mdx index 6706ad91d..dbcd9f615 100644 --- a/website/docs/language/values/locals.mdx +++ b/website/docs/language/values/locals.mdx @@ -12,7 +12,7 @@ description: >- > tutorial on HashiCorp Learn. A local value assigns a name to an [expression](/language/expressions), -so you can use the name multiple times within a module without repeating +so you can use the name multiple times within a module instead of repeating the expression. If you're familiar with traditional programming languages, it can be useful to From 5171c89e1eaa184e827d1c29486ab026e7c8c73c Mon Sep 17 00:00:00 2001 From: SuperKooks Date: Sat, 8 Jan 2022 14:41:20 +1100 Subject: [PATCH 35/68] Fix autocomplete for workspace subcommands --- internal/command/autocomplete.go | 6 +----- internal/command/workspace_delete.go | 1 - internal/command/workspace_new.go | 1 - internal/command/workspace_select.go | 1 - 4 files changed, 1 insertion(+), 8 deletions(-) diff --git a/internal/command/autocomplete.go b/internal/command/autocomplete.go index 4b19c1c95..87e765fe8 100644 --- a/internal/command/autocomplete.go +++ b/internal/command/autocomplete.go @@ -19,11 +19,7 @@ var completePredictModuleSource = complete.PredictAnything type completePredictSequence []complete.Predictor func (s completePredictSequence) Predict(a complete.Args) []string { - // Only one level of command is stripped off the prefix of a.Completed - // here, so nested subcommands like "workspace new" will need to provide - // dummy entries (e.g. complete.PredictNothing) as placeholders for - // all but the first subcommand. For example, "workspace new" needs - // one placeholder for the argument "new". 
+ // Nested subcommands do not require any placeholder entry for their subcommand name. idx := len(a.Completed) if idx >= len(s) { return nil diff --git a/internal/command/workspace_delete.go b/internal/command/workspace_delete.go index 654aac581..013db3966 100644 --- a/internal/command/workspace_delete.go +++ b/internal/command/workspace_delete.go @@ -190,7 +190,6 @@ func (c *WorkspaceDeleteCommand) Run(args []string) int { func (c *WorkspaceDeleteCommand) AutocompleteArgs() complete.Predictor { return completePredictSequence{ - complete.PredictNothing, // the "select" subcommand itself (already matched) c.completePredictWorkspaceName(), complete.PredictDirs(""), } diff --git a/internal/command/workspace_new.go b/internal/command/workspace_new.go index 41e657bef..cd28e6986 100644 --- a/internal/command/workspace_new.go +++ b/internal/command/workspace_new.go @@ -167,7 +167,6 @@ func (c *WorkspaceNewCommand) Run(args []string) int { func (c *WorkspaceNewCommand) AutocompleteArgs() complete.Predictor { return completePredictSequence{ - complete.PredictNothing, // the "new" subcommand itself (already matched) complete.PredictAnything, complete.PredictDirs(""), } diff --git a/internal/command/workspace_select.go b/internal/command/workspace_select.go index 1f98ec55e..e257b59d7 100644 --- a/internal/command/workspace_select.go +++ b/internal/command/workspace_select.go @@ -117,7 +117,6 @@ func (c *WorkspaceSelectCommand) Run(args []string) int { func (c *WorkspaceSelectCommand) AutocompleteArgs() complete.Predictor { return completePredictSequence{ - complete.PredictNothing, // the "select" subcommand itself (already matched) c.completePredictWorkspaceName(), complete.PredictDirs(""), } From 171e7ef6d9fa48e37d5fa252cd7823b7bc1d9db5 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 10 Jan 2022 09:49:28 -0800 Subject: [PATCH 36/68] core: Invalid for_each argument messaging improvements Our original messaging here was largely just lifted from the equivalent message 
for unknown values in "count", and it didn't really include any specific advice on how to update a configuration to make for_each valid, instead focusing only on the workaround of using the -target planning option. It's tough to pack in a fully-actionable suggestion here since unknown values in for_each keys tends to be a gnarly architectural problem rather than a local quirk -- when data flows between modules it can sometimes be unclear whether it'll end up being used in a context which allows unknown values. I did my best to summarize the advice we've been giving in community forum though, in the hope that more people will be able to address this for themselves without asking for help, until we're one day able to smooth this out better with a mechanism such as "partial apply". --- internal/terraform/eval_for_each.go | 17 +++++++++++++---- internal/terraform/eval_for_each_test.go | 12 ++++++------ 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/internal/terraform/eval_for_each.go b/internal/terraform/eval_for_each.go index fccf58f91..ba7a8c9bc 100644 --- a/internal/terraform/eval_for_each.go +++ b/internal/terraform/eval_for_each.go @@ -78,6 +78,9 @@ func evaluateForEachExpressionValue(expr hcl.Expression, ctx EvalContext, allowU } ty := forEachVal.Type() + const errInvalidUnknownDetailMap = "The \"for_each\" map includes keys derived from resource attributes that cannot be determined until apply, and so Terraform cannot determine the full set of keys that will identify the instances of this resource.\n\nWhen working with unknown values in for_each, it's better to define the map keys statically in your configuration and place apply-time results only in the map values.\n\nAlternatively, you could use the -target planning option to first apply only the resources that the for_each value depends on, and then apply a second time to fully converge." 
+ const errInvalidUnknownDetailSet = "The \"for_each\" set includes values derived from resource attributes that cannot be determined until apply, and so Terraform cannot determine the full set of keys that will identify the instances of this resource.\n\nWhen working with unknown values in for_each, it's better to use a map value where the keys are defined statically in your configuration and where only the values contain apply-time results.\n\nAlternatively, you could use the -target planning option to first apply only the resources that the for_each value depends on, and then apply a second time to fully converge." + switch { case forEachVal.IsNull(): diags = diags.Append(&hcl.Diagnostic{ @@ -91,10 +94,18 @@ func evaluateForEachExpressionValue(expr hcl.Expression, ctx EvalContext, allowU return nullMap, diags case !forEachVal.IsKnown(): if !allowUnknown { + var detailMsg string + switch { + case ty.IsSetType(): + detailMsg = errInvalidUnknownDetailSet + default: + detailMsg = errInvalidUnknownDetailMap + } + diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid for_each argument", - Detail: errInvalidForEachUnknownDetail, + Detail: detailMsg, Subject: expr.Range().Ptr(), Expression: expr, EvalContext: hclCtx, @@ -129,7 +140,7 @@ func evaluateForEachExpressionValue(expr hcl.Expression, ctx EvalContext, allowU diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid for_each argument", - Detail: errInvalidForEachUnknownDetail, + Detail: errInvalidUnknownDetailSet, Subject: expr.Range().Ptr(), Expression: expr, EvalContext: hclCtx, @@ -172,8 +183,6 @@ func evaluateForEachExpressionValue(expr hcl.Expression, ctx EvalContext, allowU return forEachVal, nil } -const errInvalidForEachUnknownDetail = `The "for_each" value depends on resource attributes that cannot be determined until apply, so Terraform cannot predict how many instances will be created. 
To work around this, use the -target argument to first apply only the resources that the for_each depends on.` - // markSafeLengthInt allows calling LengthInt on marked values safely func markSafeLengthInt(val cty.Value) int { v, _ := val.UnmarkDeep() diff --git a/internal/terraform/eval_for_each_test.go b/internal/terraform/eval_for_each_test.go index 28a025ab0..be4b551cf 100644 --- a/internal/terraform/eval_for_each_test.go +++ b/internal/terraform/eval_for_each_test.go @@ -114,12 +114,12 @@ func TestEvaluateForEachExpression_errors(t *testing.T) { "unknown string set": { hcltest.MockExprLiteral(cty.UnknownVal(cty.Set(cty.String))), "Invalid for_each argument", - "depends on resource attributes that cannot be determined until apply", + "set includes values derived from resource attributes that cannot be determined until apply", }, "unknown map": { hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.Bool))), "Invalid for_each argument", - "depends on resource attributes that cannot be determined until apply", + "map includes keys derived from resource attributes that cannot be determined until apply", }, "marked map": { hcltest.MockExprLiteral(cty.MapVal(map[string]cty.Value{ @@ -142,12 +142,12 @@ func TestEvaluateForEachExpression_errors(t *testing.T) { "set containing unknown value": { hcltest.MockExprLiteral(cty.SetVal([]cty.Value{cty.UnknownVal(cty.String)})), "Invalid for_each argument", - "depends on resource attributes that cannot be determined until apply", + "set includes values derived from resource attributes that cannot be determined until apply", }, "set containing dynamic unknown value": { hcltest.MockExprLiteral(cty.SetVal([]cty.Value{cty.UnknownVal(cty.DynamicPseudoType)})), "Invalid for_each argument", - "depends on resource attributes that cannot be determined until apply", + "set includes values derived from resource attributes that cannot be determined until apply", }, "set containing marked values": { 
hcltest.MockExprLiteral(cty.SetVal([]cty.Value{cty.StringVal("beep").Mark(marks.Sensitive), cty.StringVal("boop")})), @@ -169,10 +169,10 @@ func TestEvaluateForEachExpression_errors(t *testing.T) { t.Errorf("wrong diagnostic severity %#v; want %#v", got, want) } if got, want := diags[0].Description().Summary, test.Summary; got != want { - t.Errorf("wrong diagnostic summary %#v; want %#v", got, want) + t.Errorf("wrong diagnostic summary\ngot: %s\nwant: %s", got, want) } if got, want := diags[0].Description().Detail, test.DetailSubstring; !strings.Contains(got, want) { - t.Errorf("wrong diagnostic detail %#v; want %#v", got, want) + t.Errorf("wrong diagnostic detail\ngot: %s\nwant substring: %s", got, want) } if fromExpr := diags[0].FromExpr(); fromExpr != nil { if fromExpr.Expression == nil { From a579ae15b083d782245d6ae359911421808c80ea Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 10 Jan 2022 12:24:53 -0800 Subject: [PATCH 37/68] Update CHANGELOG.md --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f61750fda..59307b611 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ ## 1.2.0 (Unreleased) +ENHANCEMENTS: + +* The "Invalid for_each argument" error message for unknown maps/sets now includes an additional paragraph to try to help the user notice they can move apply-time values into the map _values_ instead of the map _keys_, and thus avoid the problem without resorting to `-target`. [GH-30327] ## Previous Releases From 483c38aca1b323dac9fda9b682f57b9639e107ae Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Thu, 2 Dec 2021 17:41:06 -0800 Subject: [PATCH 38/68] core: Remove TestContext2Validate_PlanGraphBuilder This test seems to be a holdover from the many-moons-ago switch from one graph for all operations to separate graphs for plan and apply. 
It is effectively just a copy of a subset of the content of the Context.Validate function and is a maintainability hazard because it tends to lag behind updates to that function unless changes there happen to make it fail. This test doesn't cover anything that the other validate context tests don't exercise as an implementation detail of calling Context.Validate, so I've just removed it with no replacement. --- internal/terraform/context_validate_test.go | 26 --------------------- 1 file changed, 26 deletions(-) diff --git a/internal/terraform/context_validate_test.go b/internal/terraform/context_validate_test.go index 1ed5ad425..a02d85cdd 100644 --- a/internal/terraform/context_validate_test.go +++ b/internal/terraform/context_validate_test.go @@ -1187,32 +1187,6 @@ resource "aws_instance" "foo" { } } -// Manually validate using the new PlanGraphBuilder -func TestContext2Validate_PlanGraphBuilder(t *testing.T) { - fixture := contextFixtureApplyVars(t) - opts := fixture.ContextOpts() - c := testContext2(t, opts) - - graph, diags := ValidateGraphBuilder(&PlanGraphBuilder{ - Config: fixture.Config, - State: states.NewState(), - Plugins: c.plugins, - }).Build(addrs.RootModuleInstance) - if diags.HasErrors() { - t.Fatalf("errors from PlanGraphBuilder: %s", diags.Err()) - } - defer c.acquireRun("validate-test")() - walker, diags := c.walk(graph, walkValidate, &graphWalkOpts{ - Config: fixture.Config, - }) - if diags.HasErrors() { - t.Fatal(diags.Err()) - } - if len(walker.NonFatalDiagnostics) > 0 { - t.Fatal(walker.NonFatalDiagnostics.Err()) - } -} - func TestContext2Validate_invalidOutput(t *testing.T) { m := testModuleInline(t, map[string]string{ "main.tf": ` From 37b1413ab3ab23af368f84766eb3b155e1f0d8bf Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Wed, 10 Nov 2021 17:29:45 -0800 Subject: [PATCH 39/68] core: Handle root and child module input variables consistently Previously we had a significant discrepancy between these two situations: we wrote the raw root 
module variables directly into the EvalContext and then applied type conversions only at expression evaluation time, while for child modules we converted and validated the values while visiting the variable graph node and wrote only the _final_ value into the EvalContext. This confusion seems to have been the root cause for #29899, where validation rules for root module variables were being applied at the wrong point in the process, prior to type conversion. To fix that bug and also make similar mistakes less likely in the future, I've made the root module variable handling more like the child module variable handling in the following ways: - The "raw value" (exactly as given by the user) lives only in the graph node representing the variable, which mirrors how the _expression_ for a child module variable lives in its graph node. This means that the flow for the two is the same except that there's no expression evaluation step for root module variables, because they arrive as constant values from the caller. - The set of variable values in the EvalContext is always only "final" values, after type conversion is complete. That in turn means we no longer need to do "just in time" conversion in evaluationStateData.GetInputVariable, and can just return the value exactly as stored, which is consistent with how we handle all other references between objects. This diff is noisier than I'd like because of how much it takes to wire a new argument (the raw variable values) through to the plan graph builder, but those changes are pretty mechanical and the interesting logic lives inside the plan graph builder itself, in NodeRootVariable, and the shared helper functions in eval_variable.go. While here I also took the opportunity to fix a historical API wart in EvalContext, where SetModuleCallArguments was built to take a set of variable values all at once but our current caller always calls with only one at a time. 
That is now just SetModuleCallArgument singular, to match with the new SetRootModuleArgument to deal with root module variables. --- internal/terraform/context_apply.go | 69 +-- internal/terraform/context_eval.go | 12 +- internal/terraform/context_import.go | 16 +- internal/terraform/context_plan.go | 74 +-- internal/terraform/context_validate.go | 24 +- internal/terraform/context_walk.go | 24 +- internal/terraform/eval_context.go | 22 +- internal/terraform/eval_context_builtin.go | 29 +- internal/terraform/eval_context_mock.go | 38 +- internal/terraform/eval_variable.go | 107 ++++- internal/terraform/eval_variable_test.go | 426 ++++++++++++++++++ internal/terraform/evaluate.go | 40 +- internal/terraform/graph_builder_apply.go | 7 +- .../terraform/graph_builder_destroy_plan.go | 5 + internal/terraform/graph_builder_eval.go | 7 +- internal/terraform/graph_builder_import.go | 7 +- internal/terraform/graph_builder_plan.go | 7 +- internal/terraform/node_module_variable.go | 125 ++--- internal/terraform/node_root_variable.go | 59 ++- internal/terraform/node_root_variable_test.go | 166 ++++++- internal/terraform/transform_variable.go | 5 +- 21 files changed, 1012 insertions(+), 257 deletions(-) create mode 100644 internal/terraform/eval_variable_test.go diff --git a/internal/terraform/context_apply.go b/internal/terraform/context_apply.go index e5d2702bc..42520b03d 100644 --- a/internal/terraform/context_apply.go +++ b/internal/terraform/context_apply.go @@ -30,30 +30,11 @@ func (c *Context) Apply(plan *plans.Plan, config *configs.Config) (*states.State return nil, diags } - variables := InputValues{} - for name, dyVal := range plan.VariableValues { - val, err := dyVal.Decode(cty.DynamicPseudoType) - if err != nil { - diags = diags.Append(tfdiags.Sourceless( - tfdiags.Error, - "Invalid variable value in plan", - fmt.Sprintf("Invalid value for variable %q recorded in plan file: %s.", name, err), - )) - continue - } - - variables[name] = &InputValue{ - Value: val, - 
SourceType: ValueFromPlan, - } - } - workingState := plan.PriorState.DeepCopy() walker, walkDiags := c.walk(graph, operation, &graphWalkOpts{ - Config: config, - InputState: workingState, - Changes: plan.Changes, - RootVariableValues: variables, + Config: config, + InputState: workingState, + Changes: plan.Changes, }) diags = diags.Append(walker.NonFatalDiagnostics) diags = diags.Append(walkDiags) @@ -83,15 +64,43 @@ Note that the -target option is not suitable for routine use, and is provided on } func (c *Context) applyGraph(plan *plans.Plan, config *configs.Config, validate bool) (*Graph, walkOperation, tfdiags.Diagnostics) { - graph, diags := (&ApplyGraphBuilder{ - Config: config, - Changes: plan.Changes, - State: plan.PriorState, - Plugins: c.plugins, - Targets: plan.TargetAddrs, - ForceReplace: plan.ForceReplaceAddrs, - Validate: validate, + var diags tfdiags.Diagnostics + + variables := InputValues{} + for name, dyVal := range plan.VariableValues { + val, err := dyVal.Decode(cty.DynamicPseudoType) + if err != nil { + diags = diags.Append(tfdiags.Sourceless( + tfdiags.Error, + "Invalid variable value in plan", + fmt.Sprintf("Invalid value for variable %q recorded in plan file: %s.", name, err), + )) + continue + } + + variables[name] = &InputValue{ + Value: val, + SourceType: ValueFromPlan, + } + } + if diags.HasErrors() { + return nil, walkApply, diags + } + + graph, moreDiags := (&ApplyGraphBuilder{ + Config: config, + Changes: plan.Changes, + State: plan.PriorState, + RootVariableValues: variables, + Plugins: c.plugins, + Targets: plan.TargetAddrs, + ForceReplace: plan.ForceReplaceAddrs, + Validate: validate, }).Build(addrs.RootModuleInstance) + diags = diags.Append(moreDiags) + if moreDiags.HasErrors() { + return nil, walkApply, diags + } operation := walkApply if plan.UIMode == plans.DestroyMode { diff --git a/internal/terraform/context_eval.go b/internal/terraform/context_eval.go index efc24767c..c6af77635 100644 --- a/internal/terraform/context_eval.go 
+++ b/internal/terraform/context_eval.go @@ -60,9 +60,10 @@ func (c *Context) Eval(config *configs.Config, state *states.State, moduleAddr a log.Printf("[DEBUG] Building and walking 'eval' graph") graph, moreDiags := (&EvalGraphBuilder{ - Config: config, - State: state, - Plugins: c.plugins, + Config: config, + State: state, + RootVariableValues: variables, + Plugins: c.plugins, }).Build(addrs.RootModuleInstance) diags = diags.Append(moreDiags) if moreDiags.HasErrors() { @@ -70,9 +71,8 @@ func (c *Context) Eval(config *configs.Config, state *states.State, moduleAddr a } walkOpts := &graphWalkOpts{ - InputState: state, - Config: config, - RootVariableValues: variables, + InputState: state, + Config: config, } walker, moreDiags = c.walk(graph, walkEval, walkOpts) diff --git a/internal/terraform/context_import.go b/internal/terraform/context_import.go index af17cbd62..b5417405f 100644 --- a/internal/terraform/context_import.go +++ b/internal/terraform/context_import.go @@ -53,11 +53,14 @@ func (c *Context) Import(config *configs.Config, prevRunState *states.State, opt log.Printf("[DEBUG] Building and walking import graph") + variables := mergeDefaultInputVariableValues(opts.SetVariables, config.Module.Variables) + // Initialize our graph builder builder := &ImportGraphBuilder{ - ImportTargets: opts.Targets, - Config: config, - Plugins: c.plugins, + ImportTargets: opts.Targets, + Config: config, + RootVariableValues: variables, + Plugins: c.plugins, } // Build the graph @@ -67,13 +70,10 @@ func (c *Context) Import(config *configs.Config, prevRunState *states.State, opt return state, diags } - variables := mergeDefaultInputVariableValues(opts.SetVariables, config.Module.Variables) - // Walk it walker, walkDiags := c.walk(graph, walkImport, &graphWalkOpts{ - Config: config, - InputState: state, - RootVariableValues: variables, + Config: config, + InputState: state, }) diags = diags.Append(walkDiags) if walkDiags.HasErrors() { diff --git 
a/internal/terraform/context_plan.go b/internal/terraform/context_plan.go index 3f860ef1b..0b3c97f14 100644 --- a/internal/terraform/context_plan.go +++ b/internal/terraform/context_plan.go @@ -125,11 +125,11 @@ The -target option is not for routine use, and is provided only for exceptional var planDiags tfdiags.Diagnostics switch opts.Mode { case plans.NormalMode: - plan, planDiags = c.plan(config, prevRunState, variables, opts) + plan, planDiags = c.plan(config, prevRunState, opts) case plans.DestroyMode: - plan, planDiags = c.destroyPlan(config, prevRunState, variables, opts) + plan, planDiags = c.destroyPlan(config, prevRunState, opts) case plans.RefreshOnlyMode: - plan, planDiags = c.refreshOnlyPlan(config, prevRunState, variables, opts) + plan, planDiags = c.refreshOnlyPlan(config, prevRunState, opts) default: panic(fmt.Sprintf("unsupported plan mode %s", opts.Mode)) } @@ -172,14 +172,14 @@ var DefaultPlanOpts = &PlanOpts{ Mode: plans.NormalMode, } -func (c *Context) plan(config *configs.Config, prevRunState *states.State, rootVariables InputValues, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { +func (c *Context) plan(config *configs.Config, prevRunState *states.State, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { var diags tfdiags.Diagnostics if opts.Mode != plans.NormalMode { panic(fmt.Sprintf("called Context.plan with %s", opts.Mode)) } - plan, walkDiags := c.planWalk(config, prevRunState, rootVariables, opts) + plan, walkDiags := c.planWalk(config, prevRunState, opts) diags = diags.Append(walkDiags) if diags.HasErrors() { return nil, diags @@ -194,14 +194,14 @@ func (c *Context) plan(config *configs.Config, prevRunState *states.State, rootV return plan, diags } -func (c *Context) refreshOnlyPlan(config *configs.Config, prevRunState *states.State, rootVariables InputValues, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { +func (c *Context) refreshOnlyPlan(config *configs.Config, prevRunState *states.State, opts *PlanOpts) (*plans.Plan, 
tfdiags.Diagnostics) { var diags tfdiags.Diagnostics if opts.Mode != plans.RefreshOnlyMode { panic(fmt.Sprintf("called Context.refreshOnlyPlan with %s", opts.Mode)) } - plan, walkDiags := c.planWalk(config, prevRunState, rootVariables, opts) + plan, walkDiags := c.planWalk(config, prevRunState, opts) diags = diags.Append(walkDiags) if diags.HasErrors() { return nil, diags @@ -235,7 +235,7 @@ func (c *Context) refreshOnlyPlan(config *configs.Config, prevRunState *states.S return plan, diags } -func (c *Context) destroyPlan(config *configs.Config, prevRunState *states.State, rootVariables InputValues, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { +func (c *Context) destroyPlan(config *configs.Config, prevRunState *states.State, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { var diags tfdiags.Diagnostics pendingPlan := &plans.Plan{} @@ -260,7 +260,7 @@ func (c *Context) destroyPlan(config *configs.Config, prevRunState *states.State log.Printf("[TRACE] Context.destroyPlan: calling Context.plan to get the effect of refreshing the prior state") normalOpts := *opts normalOpts.Mode = plans.NormalMode - refreshPlan, refreshDiags := c.plan(config, prevRunState, rootVariables, &normalOpts) + refreshPlan, refreshDiags := c.plan(config, prevRunState, &normalOpts) if refreshDiags.HasErrors() { // NOTE: Normally we'd append diagnostics regardless of whether // there are errors, just in case there are warnings we'd want to @@ -291,7 +291,7 @@ func (c *Context) destroyPlan(config *configs.Config, prevRunState *states.State priorState = pendingPlan.PriorState } - destroyPlan, walkDiags := c.planWalk(config, priorState, rootVariables, opts) + destroyPlan, walkDiags := c.planWalk(config, priorState, opts) diags = diags.Append(walkDiags) if walkDiags.HasErrors() { return nil, diags @@ -392,7 +392,7 @@ func (c *Context) postPlanValidateMoves(config *configs.Config, stmts []refactor return refactoring.ValidateMoves(stmts, config, allInsts) } -func (c *Context) 
planWalk(config *configs.Config, prevRunState *states.State, rootVariables InputValues, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { +func (c *Context) planWalk(config *configs.Config, prevRunState *states.State, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { var diags tfdiags.Diagnostics log.Printf("[DEBUG] Building and walking plan graph for %s", opts.Mode) @@ -419,11 +419,10 @@ func (c *Context) planWalk(config *configs.Config, prevRunState *states.State, r // we can now walk. changes := plans.NewChanges() walker, walkDiags := c.walk(graph, walkOp, &graphWalkOpts{ - Config: config, - InputState: prevRunState, - Changes: changes, - MoveResults: moveResults, - RootVariableValues: rootVariables, + Config: config, + InputState: prevRunState, + Changes: changes, + MoveResults: moveResults, }) diags = diags.Append(walker.NonFatalDiagnostics) diags = diags.Append(walkDiags) @@ -469,34 +468,37 @@ func (c *Context) planGraph(config *configs.Config, prevRunState *states.State, switch mode := opts.Mode; mode { case plans.NormalMode: graph, diags := (&PlanGraphBuilder{ - Config: config, - State: prevRunState, - Plugins: c.plugins, - Targets: opts.Targets, - ForceReplace: opts.ForceReplace, - Validate: validate, - skipRefresh: opts.SkipRefresh, + Config: config, + State: prevRunState, + RootVariableValues: opts.SetVariables, + Plugins: c.plugins, + Targets: opts.Targets, + ForceReplace: opts.ForceReplace, + Validate: validate, + skipRefresh: opts.SkipRefresh, }).Build(addrs.RootModuleInstance) return graph, walkPlan, diags case plans.RefreshOnlyMode: graph, diags := (&PlanGraphBuilder{ - Config: config, - State: prevRunState, - Plugins: c.plugins, - Targets: opts.Targets, - Validate: validate, - skipRefresh: opts.SkipRefresh, - skipPlanChanges: true, // this activates "refresh only" mode. 
+ Config: config, + State: prevRunState, + RootVariableValues: opts.SetVariables, + Plugins: c.plugins, + Targets: opts.Targets, + Validate: validate, + skipRefresh: opts.SkipRefresh, + skipPlanChanges: true, // this activates "refresh only" mode. }).Build(addrs.RootModuleInstance) return graph, walkPlan, diags case plans.DestroyMode: graph, diags := (&DestroyPlanGraphBuilder{ - Config: config, - State: prevRunState, - Plugins: c.plugins, - Targets: opts.Targets, - Validate: validate, - skipRefresh: opts.SkipRefresh, + Config: config, + State: prevRunState, + RootVariableValues: opts.SetVariables, + Plugins: c.plugins, + Targets: opts.Targets, + Validate: validate, + skipRefresh: opts.SkipRefresh, }).Build(addrs.RootModuleInstance) return graph, walkPlanDestroy, diags default: diff --git a/internal/terraform/context_validate.go b/internal/terraform/context_validate.go index fb54be420..4fb02f767 100644 --- a/internal/terraform/context_validate.go +++ b/internal/terraform/context_validate.go @@ -37,17 +37,6 @@ func (c *Context) Validate(config *configs.Config) tfdiags.Diagnostics { log.Printf("[DEBUG] Building and walking validate graph") - graph, moreDiags := ValidateGraphBuilder(&PlanGraphBuilder{ - Config: config, - Plugins: c.plugins, - Validate: true, - State: states.NewState(), - }).Build(addrs.RootModuleInstance) - diags = diags.Append(moreDiags) - if moreDiags.HasErrors() { - return diags - } - // Validate is to check if the given module is valid regardless of // input values, current state, etc. 
Therefore we populate all of the // input values with unknown values of the expected type, allowing us @@ -66,9 +55,20 @@ func (c *Context) Validate(config *configs.Config) tfdiags.Diagnostics { } } - walker, walkDiags := c.walk(graph, walkValidate, &graphWalkOpts{ + graph, moreDiags := ValidateGraphBuilder(&PlanGraphBuilder{ Config: config, + Plugins: c.plugins, + Validate: true, + State: states.NewState(), RootVariableValues: varValues, + }).Build(addrs.RootModuleInstance) + diags = diags.Append(moreDiags) + if moreDiags.HasErrors() { + return diags + } + + walker, walkDiags := c.walk(graph, walkValidate, &graphWalkOpts{ + Config: config, }) diags = diags.Append(walker.NonFatalDiagnostics) diags = diags.Append(walkDiags) diff --git a/internal/terraform/context_walk.go b/internal/terraform/context_walk.go index 166341513..e8b506314 100644 --- a/internal/terraform/context_walk.go +++ b/internal/terraform/context_walk.go @@ -23,8 +23,7 @@ type graphWalkOpts struct { Changes *plans.Changes Config *configs.Config - RootVariableValues InputValues - MoveResults refactoring.MoveResults + MoveResults refactoring.MoveResults } func (c *Context) walk(graph *Graph, operation walkOperation, opts *graphWalkOpts) (*ContextGraphWalker, tfdiags.Diagnostics) { @@ -98,16 +97,15 @@ func (c *Context) graphWalker(operation walkOperation, opts *graphWalkOpts) *Con } return &ContextGraphWalker{ - Context: c, - State: state, - Config: opts.Config, - RefreshState: refreshState, - PrevRunState: prevRunState, - Changes: changes.SyncWrapper(), - InstanceExpander: instances.NewExpander(), - MoveResults: opts.MoveResults, - Operation: operation, - StopContext: c.runContext, - RootVariableValues: opts.RootVariableValues, + Context: c, + State: state, + Config: opts.Config, + RefreshState: refreshState, + PrevRunState: prevRunState, + Changes: changes.SyncWrapper(), + InstanceExpander: instances.NewExpander(), + MoveResults: opts.MoveResults, + Operation: operation, + StopContext: c.runContext, 
} } diff --git a/internal/terraform/eval_context.go b/internal/terraform/eval_context.go index 4b5a3a5c2..8a5958ceb 100644 --- a/internal/terraform/eval_context.go +++ b/internal/terraform/eval_context.go @@ -121,12 +121,24 @@ type EvalContext interface { // addresses in this context. EvaluationScope(self addrs.Referenceable, keyData InstanceKeyEvalData) *lang.Scope - // SetModuleCallArguments defines values for the variables of a particular - // child module call. + // SetRootModuleArgument defines the value for one variable of the root + // module. The caller must ensure that given value is a suitable + // "final value" for the variable, which means that it's already converted + // and validated to match any configured constraints and validation rules. // - // Calling this function multiple times has merging behavior, keeping any - // previously-set keys that are not present in the new map. - SetModuleCallArguments(addrs.ModuleCallInstance, map[string]cty.Value) + // Calling this function multiple times with the same variable address + // will silently overwrite the value provided by a previous call. + SetRootModuleArgument(addrs.InputVariable, cty.Value) + + // SetModuleCallArgument defines the value for one input variable of a + // particular child module call. The caller must ensure that the given + // value is a suitable "final value" for the variable, which means that + // it's already converted and validated to match any configured + // constraints and validation rules. + // + // Calling this function multiple times with the same variable address + // will silently overwrite the value provided by a previous call. 
+ SetModuleCallArgument(addrs.ModuleCallInstance, addrs.InputVariable, cty.Value) // GetVariableValue returns the value provided for the input variable with // the given address, or cty.DynamicVal if the variable hasn't been assigned diff --git a/internal/terraform/eval_context_builtin.go b/internal/terraform/eval_context_builtin.go index ecbac446e..35170bcd6 100644 --- a/internal/terraform/eval_context_builtin.go +++ b/internal/terraform/eval_context_builtin.go @@ -313,7 +313,21 @@ func (ctx *BuiltinEvalContext) Path() addrs.ModuleInstance { return ctx.PathValue } -func (ctx *BuiltinEvalContext) SetModuleCallArguments(n addrs.ModuleCallInstance, vals map[string]cty.Value) { +func (ctx *BuiltinEvalContext) SetRootModuleArgument(addr addrs.InputVariable, v cty.Value) { + ctx.VariableValuesLock.Lock() + defer ctx.VariableValuesLock.Unlock() + + log.Printf("[TRACE] BuiltinEvalContext: Storing final value for variable %s", addr.Absolute(addrs.RootModuleInstance)) + key := addrs.RootModuleInstance.String() + args := ctx.VariableValues[key] + if args == nil { + args = make(map[string]cty.Value) + ctx.VariableValues[key] = args + } + args[addr.Name] = v +} + +func (ctx *BuiltinEvalContext) SetModuleCallArgument(callAddr addrs.ModuleCallInstance, varAddr addrs.InputVariable, v cty.Value) { ctx.VariableValuesLock.Lock() defer ctx.VariableValuesLock.Unlock() @@ -321,18 +335,15 @@ func (ctx *BuiltinEvalContext) SetModuleCallArguments(n addrs.ModuleCallInstance panic("context path not set") } - childPath := n.ModuleInstance(ctx.PathValue) + childPath := callAddr.ModuleInstance(ctx.PathValue) + log.Printf("[TRACE] BuiltinEvalContext: Storing final value for variable %s", varAddr.Absolute(childPath)) key := childPath.String() - args := ctx.VariableValues[key] if args == nil { - ctx.VariableValues[key] = vals - return - } - - for k, v := range vals { - args[k] = v + args = make(map[string]cty.Value) + ctx.VariableValues[key] = args } + args[varAddr.Name] = v } func (ctx 
*BuiltinEvalContext) GetVariableValue(addr addrs.AbsInputVariableInstance) cty.Value { diff --git a/internal/terraform/eval_context_mock.go b/internal/terraform/eval_context_mock.go index edcdaac6b..8dd6ec334 100644 --- a/internal/terraform/eval_context_mock.go +++ b/internal/terraform/eval_context_mock.go @@ -111,13 +111,21 @@ type MockEvalContext struct { PathCalled bool PathPath addrs.ModuleInstance - SetModuleCallArgumentsCalled bool - SetModuleCallArgumentsModule addrs.ModuleCallInstance - SetModuleCallArgumentsValues map[string]cty.Value + SetRootModuleArgumentCalled bool + SetRootModuleArgumentAddr addrs.InputVariable + SetRootModuleArgumentValue cty.Value + SetRootModuleArgumentFunc func(addr addrs.InputVariable, v cty.Value) + + SetModuleCallArgumentCalled bool + SetModuleCallArgumentModuleCall addrs.ModuleCallInstance + SetModuleCallArgumentVariable addrs.InputVariable + SetModuleCallArgumentValue cty.Value + SetModuleCallArgumentFunc func(callAddr addrs.ModuleCallInstance, varAddr addrs.InputVariable, v cty.Value) GetVariableValueCalled bool GetVariableValueAddr addrs.AbsInputVariableInstance GetVariableValueValue cty.Value + GetVariableValueFunc func(addr addrs.AbsInputVariableInstance) cty.Value // supersedes GetVariableValueValue ChangesCalled bool ChangesChanges *plans.ChangesSync @@ -321,15 +329,31 @@ func (c *MockEvalContext) Path() addrs.ModuleInstance { return c.PathPath } -func (c *MockEvalContext) SetModuleCallArguments(n addrs.ModuleCallInstance, values map[string]cty.Value) { - c.SetModuleCallArgumentsCalled = true - c.SetModuleCallArgumentsModule = n - c.SetModuleCallArgumentsValues = values +func (c *MockEvalContext) SetRootModuleArgument(addr addrs.InputVariable, v cty.Value) { + c.SetRootModuleArgumentCalled = true + c.SetRootModuleArgumentAddr = addr + c.SetRootModuleArgumentValue = v + if c.SetRootModuleArgumentFunc != nil { + c.SetRootModuleArgumentFunc(addr, v) + } +} + +func (c *MockEvalContext) SetModuleCallArgument(callAddr 
addrs.ModuleCallInstance, varAddr addrs.InputVariable, v cty.Value) { + c.SetModuleCallArgumentCalled = true + c.SetModuleCallArgumentModuleCall = callAddr + c.SetModuleCallArgumentVariable = varAddr + c.SetModuleCallArgumentValue = v + if c.SetModuleCallArgumentFunc != nil { + c.SetModuleCallArgumentFunc(callAddr, varAddr, v) + } } func (c *MockEvalContext) GetVariableValue(addr addrs.AbsInputVariableInstance) cty.Value { c.GetVariableValueCalled = true c.GetVariableValueAddr = addr + if c.GetVariableValueFunc != nil { + return c.GetVariableValueFunc(addr) + } return c.GetVariableValueValue } diff --git a/internal/terraform/eval_variable.go b/internal/terraform/eval_variable.go index bdfadd2e9..1c16069ad 100644 --- a/internal/terraform/eval_variable.go +++ b/internal/terraform/eval_variable.go @@ -12,6 +12,102 @@ import ( "github.com/zclconf/go-cty/cty/convert" ) +func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given cty.Value, valRange tfdiags.SourceRange, cfg *configs.Variable) (cty.Value, tfdiags.Diagnostics) { + var diags tfdiags.Diagnostics + + convertTy := cfg.ConstraintType + log.Printf("[TRACE] prepareFinalInputVariableValue: preparing %s", addr) + + var defaultVal cty.Value + if cfg.Default != cty.NilVal { + log.Printf("[TRACE] prepareFinalInputVariableValue: %s has a default value", addr) + var err error + defaultVal, err = convert.Convert(cfg.Default, convertTy) + if err != nil { + // Validation of the declaration should typically catch this, + // but we'll check it here too to be robust. + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid default value for module argument", + Detail: fmt.Sprintf( + "The default value for variable %q is incompatible with its type constraint: %s.", + cfg.Name, err, + ), + Subject: &cfg.DeclRange, + }) + // We'll return a placeholder unknown value to avoid producing + // redundant downstream errors. 
+ return cty.UnknownVal(cfg.Type), diags + } + } + + if given == cty.NilVal { // The variable wasn't set at all (even to null) + log.Printf("[TRACE] prepareFinalInputVariableValue: %s has no defined value", addr) + if cfg.Required() { + // NOTE: The CLI layer typically checks for itself whether all of + // the required _root_ module variables are not set, which would + // mask this error. We can get here for child module variables, + // though. + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: `Required variable not set`, + Detail: fmt.Sprintf(`The variable %q is required, but is not set.`, addr.Variable.Name), + Subject: valRange.ToHCL().Ptr(), + }) + // We'll return a placeholder unknown value to avoid producing + // redundant downstream errors. + return cty.UnknownVal(cfg.Type), diags + } + + given = defaultVal // must be set, because we checked above that the variable isn't required + } + + val, err := convert.Convert(given, convertTy) + if err != nil { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid value for module argument", + Detail: fmt.Sprintf( + "The given value is not suitable for child module variable %q defined at %s: %s.", + cfg.Name, cfg.DeclRange.String(), err, + ), + Subject: valRange.ToHCL().Ptr(), + }) + // We'll return a placeholder unknown value to avoid producing + // redundant downstream errors. + return cty.UnknownVal(cfg.Type), diags + } + + // By the time we get here, we know: + // - val matches the variable's type constraint + // - val is definitely not cty.NilVal, but might be a null value if the given was already null. + // + // That means we just need to handle the case where the value is null, + // which might mean we need to use the default value, or produce an error. + // + // For historical reasons we do this only for a "non-nullable" variable. + // Nullable variables just appear as null if they were set to null, + // regardless of any default value. 
+ if val.IsNull() && !cfg.Nullable { + log.Printf("[TRACE] prepareFinalInputVariableValue: %s is defined as null", addr) + if defaultVal != cty.NilVal { + val = defaultVal + } else { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: `Required variable not set`, + Detail: fmt.Sprintf(`The variable %q is required, but the given value is null.`, addr.Variable.Name), + Subject: valRange.ToHCL().Ptr(), + }) + // Stub out our return value so that the semantic checker doesn't + // produce redundant downstream errors. + val = cty.UnknownVal(cfg.Type) + } + } + + return val, diags +} + // evalVariableValidations ensures that all of the configured custom validations // for a variable are passing. // @@ -20,9 +116,10 @@ import ( // EvalModuleCallArgument for variables in descendent modules. func evalVariableValidations(addr addrs.AbsInputVariableInstance, config *configs.Variable, expr hcl.Expression, ctx EvalContext) (diags tfdiags.Diagnostics) { if config == nil || len(config.Validations) == 0 { - log.Printf("[TRACE] evalVariableValidations: not active for %s, so skipping", addr) + log.Printf("[TRACE] evalVariableValidations: no validation rules declared for %s, so skipping", addr) return nil } + log.Printf("[TRACE] evalVariableValidations: validating %s", addr) // Variable nodes evaluate in the parent module to where they were declared // because the value expression (n.Expr, if set) comes from the calling @@ -34,6 +131,14 @@ func evalVariableValidations(addr addrs.AbsInputVariableInstance, config *config // evaluation context containing just the required value, and thus avoid // the problem that ctx's evaluation functions refer to the wrong module. val := ctx.GetVariableValue(addr) + if val == cty.NilVal { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "No final value for variable", + Detail: fmt.Sprintf("Terraform doesn't have a final value for %s during validation. 
This is a bug in Terraform; please report it!", addr), + }) + return diags + } hclCtx := &hcl.EvalContext{ Variables: map[string]cty.Value{ "var": cty.ObjectVal(map[string]cty.Value{ diff --git a/internal/terraform/eval_variable_test.go b/internal/terraform/eval_variable_test.go new file mode 100644 index 000000000..fa4048fed --- /dev/null +++ b/internal/terraform/eval_variable_test.go @@ -0,0 +1,426 @@ +package terraform + +import ( + "fmt" + "testing" + + "github.com/zclconf/go-cty/cty" + + "github.com/hashicorp/terraform/internal/addrs" + "github.com/hashicorp/terraform/internal/tfdiags" +) + +func TestPrepareFinalInputVariableValue(t *testing.T) { + // This is just a concise way to define a bunch of *configs.Variable + // objects to use in our tests below. We're only going to decode this + // config, not fully evaluate it. + cfgSrc := ` + variable "nullable_required" { + } + variable "nullable_optional_default_string" { + default = "hello" + } + variable "nullable_optional_default_null" { + default = null + } + variable "constrained_string_nullable_required" { + type = string + } + variable "constrained_string_nullable_optional_default_string" { + type = string + default = "hello" + } + variable "constrained_string_nullable_optional_default_bool" { + type = string + default = true + } + variable "constrained_string_nullable_optional_default_null" { + type = string + default = null + } + variable "required" { + nullable = false + } + variable "optional_default_string" { + nullable = false + default = "hello" + } + variable "constrained_string_required" { + nullable = false + type = string + } + variable "constrained_string_optional_default_string" { + nullable = false + type = string + default = "hello" + } + variable "constrained_string_optional_default_bool" { + nullable = false + type = string + default = true + } + ` + cfg := testModuleInline(t, map[string]string{ + "main.tf": cfgSrc, + }) + variableConfigs := cfg.Module.Variables + + tests := []struct { + 
varName string + given cty.Value + want cty.Value + wantErr string + }{ + // nullable_required + { + "nullable_required", + cty.NilVal, + cty.UnknownVal(cty.DynamicPseudoType), + `Required variable not set: The variable "nullable_required" is required, but is not set.`, + }, + { + "nullable_required", + cty.NullVal(cty.DynamicPseudoType), + cty.NullVal(cty.DynamicPseudoType), + ``, // "required" for a nullable variable means only that it must be set, even if it's set to null + }, + { + "nullable_required", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "nullable_required", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // nullable_optional_default_string + { + "nullable_optional_default_string", + cty.NilVal, + cty.StringVal("hello"), // the declared default value + ``, + }, + { + "nullable_optional_default_string", + cty.NullVal(cty.DynamicPseudoType), + cty.NullVal(cty.DynamicPseudoType), // nullable variables can be really set to null, masking the default + ``, + }, + { + "nullable_optional_default_string", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "nullable_optional_default_string", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // nullable_optional_default_null + { + "nullable_optional_default_null", + cty.NilVal, + cty.NullVal(cty.DynamicPseudoType), // the declared default value + ``, + }, + { + "nullable_optional_default_null", + cty.NullVal(cty.String), + cty.NullVal(cty.String), // nullable variables can be really set to null, masking the default + ``, + }, + { + "nullable_optional_default_null", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "nullable_optional_default_null", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // constrained_string_nullable_required + { + "constrained_string_nullable_required", + cty.NilVal, + cty.UnknownVal(cty.String), + `Required variable not set: The variable 
"constrained_string_nullable_required" is required, but is not set.`, + }, + { + "constrained_string_nullable_required", + cty.NullVal(cty.DynamicPseudoType), + cty.NullVal(cty.String), // the null value still gets converted to match the type constraint + ``, // "required" for a nullable variable means only that it must be set, even if it's set to null + }, + { + "constrained_string_nullable_required", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_nullable_required", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // constrained_string_nullable_optional_default_string + { + "constrained_string_nullable_optional_default_string", + cty.NilVal, + cty.StringVal("hello"), // the declared default value + ``, + }, + { + "constrained_string_nullable_optional_default_string", + cty.NullVal(cty.DynamicPseudoType), + cty.NullVal(cty.String), // nullable variables can be really set to null, masking the default + ``, + }, + { + "constrained_string_nullable_optional_default_string", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_nullable_optional_default_string", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // constrained_string_nullable_optional_default_bool + { + "constrained_string_nullable_optional_default_bool", + cty.NilVal, + cty.StringVal("true"), // the declared default value, automatically converted to match type constraint + ``, + }, + { + "constrained_string_nullable_optional_default_bool", + cty.NullVal(cty.DynamicPseudoType), + cty.NullVal(cty.String), // nullable variables can be really set to null, masking the default + ``, + }, + { + "constrained_string_nullable_optional_default_bool", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_nullable_optional_default_bool", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // 
constrained_string_nullable_optional_default_null + { + "constrained_string_nullable_optional_default_null", + cty.NilVal, + cty.NullVal(cty.String), + ``, + }, + { + "constrained_string_nullable_optional_default_null", + cty.NullVal(cty.DynamicPseudoType), + cty.NullVal(cty.String), + ``, + }, + { + "constrained_string_nullable_optional_default_null", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_nullable_optional_default_null", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // required + { + "required", + cty.NilVal, + cty.UnknownVal(cty.DynamicPseudoType), + `Required variable not set: The variable "required" is required, but is not set.`, + }, + { + "required", + cty.NullVal(cty.DynamicPseudoType), + cty.UnknownVal(cty.DynamicPseudoType), + `Required variable not set: The variable "required" is required, but the given value is null.`, + }, + { + "required", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "required", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // optional_default_string + { + "optional_default_string", + cty.NilVal, + cty.StringVal("hello"), // the declared default value + ``, + }, + { + "optional_default_string", + cty.NullVal(cty.DynamicPseudoType), + cty.StringVal("hello"), // the declared default value + ``, + }, + { + "optional_default_string", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "optional_default_string", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // constrained_string_required + { + "constrained_string_required", + cty.NilVal, + cty.UnknownVal(cty.String), + `Required variable not set: The variable "constrained_string_required" is required, but is not set.`, + }, + { + "constrained_string_required", + cty.NullVal(cty.DynamicPseudoType), + cty.UnknownVal(cty.String), + `Required variable not set: The variable "constrained_string_required" is required, but the 
given value is null.`, + }, + { + "constrained_string_required", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_required", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // constrained_string_optional_default_string + { + "constrained_string_optional_default_string", + cty.NilVal, + cty.StringVal("hello"), // the declared default value + ``, + }, + { + "constrained_string_optional_default_string", + cty.NullVal(cty.DynamicPseudoType), + cty.StringVal("hello"), // the declared default value + ``, + }, + { + "constrained_string_optional_default_string", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_optional_default_string", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + + // constrained_string_optional_default_bool + { + "constrained_string_optional_default_bool", + cty.NilVal, + cty.StringVal("true"), // the declared default value, automatically converted to match type constraint + ``, + }, + { + "constrained_string_optional_default_bool", + cty.NullVal(cty.DynamicPseudoType), + cty.StringVal("true"), // the declared default value, automatically converted to match type constraint + ``, + }, + { + "constrained_string_optional_default_bool", + cty.StringVal("ahoy"), + cty.StringVal("ahoy"), + ``, + }, + { + "constrained_string_optional_default_bool", + cty.UnknownVal(cty.String), + cty.UnknownVal(cty.String), + ``, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("%s %#v", test.varName, test.given), func(t *testing.T) { + varAddr := addrs.InputVariable{Name: test.varName}.Absolute(addrs.RootModuleInstance) + varCfg := variableConfigs[test.varName] + if varCfg == nil { + t.Fatalf("invalid variable name %q", test.varName) + } + + t.Logf( + "test case\nvariable: %s\nconstraint: %#v\ndefault: %#v\nnullable: %#v\ngiven value: %#v", + varAddr, + varCfg.Type, + varCfg.Default, + varCfg.Nullable, + test.given, + ) + + got, diags 
:= prepareFinalInputVariableValue( + varAddr, test.given, tfdiags.SourceRangeFromHCL(varCfg.DeclRange), varCfg, + ) + + if test.wantErr != "" { + if !diags.HasErrors() { + t.Errorf("unexpected success\nwant error: %s", test.wantErr) + } else if got, want := diags.Err().Error(), test.wantErr; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } + } else { + if diags.HasErrors() { + t.Errorf("unexpected error\ngot: %s", diags.Err().Error()) + } + } + + // NOTE: should still have returned some reasonable value even if there was an error + if !test.want.RawEquals(got) { + t.Fatalf("wrong result\ngot: %#v\nwant: %#v", got, test.want) + } + }) + } +} diff --git a/internal/terraform/evaluate.go b/internal/terraform/evaluate.go index 322ef6fda..243335df2 100644 --- a/internal/terraform/evaluate.go +++ b/internal/terraform/evaluate.go @@ -10,7 +10,6 @@ import ( "github.com/agext/levenshtein" "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" - "github.com/zclconf/go-cty/cty/convert" "github.com/hashicorp/terraform/internal/addrs" "github.com/hashicorp/terraform/internal/configs" @@ -248,7 +247,7 @@ func (d *evaluationStateData) GetInputVariable(addr addrs.InputVariable, rng tfd // This is important because otherwise the validation walk will tend to be // overly strict, requiring expressions throughout the configuration to // be complicated to accommodate all possible inputs, whereas returning - // known here allows for simpler patterns like using input values as + // unknown here allows for simpler patterns like using input values as // guards to broadly enable/disable resources, avoid processing things // that are disabled, etc. 
Terraform's static validation leans towards // being liberal in what it accepts because the subsequent plan walk has @@ -267,28 +266,27 @@ func (d *evaluationStateData) GetInputVariable(addr addrs.InputVariable, rng tfd return cty.UnknownVal(config.Type), diags } + // d.Evaluator.VariableValues should always contain valid "final values" + // for variables, which is to say that they have already had type + // conversions, validations, and default value handling applied to them. + // Those are the responsibility of the graph notes representing the + // variable declarations. Therefore here we just trust that we already + // have a correct value. + val, isSet := vals[addr.Name] - switch { - case !isSet: - // The config loader will ensure there is a default if the value is not - // set at all. - val = config.Default - - case val.IsNull() && !config.Nullable && config.Default != cty.NilVal: - // If nullable=false a null value will use the configured default. - val = config.Default - } - - var err error - val, err = convert.Convert(val, config.ConstraintType) - if err != nil { - // We should never get here because this problem should've been caught - // during earlier validation, but we'll do something reasonable anyway. + if !isSet { + // We should not be able to get here without having a valid value + // for every variable, so this always indicates a bug in either + // the graph builder (not including all the needed nodes) or in + // the graph nodes representing variables. diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, - Summary: `Incorrect variable type`, - Detail: fmt.Sprintf(`The resolved value of variable %q is not appropriate: %s.`, addr.Name, err), - Subject: &config.DeclRange, + Summary: `Reference to unresolved input variable`, + Detail: fmt.Sprintf( + `The final value for %s is missing in Terraform's evaluation context. 
This is a bug in Terraform; please report it!`, + addr.Absolute(d.ModulePath), + ), + Subject: rng.ToHCL().Ptr(), }) val = cty.UnknownVal(config.Type) } diff --git a/internal/terraform/graph_builder_apply.go b/internal/terraform/graph_builder_apply.go index 75f9d3d4a..86d825560 100644 --- a/internal/terraform/graph_builder_apply.go +++ b/internal/terraform/graph_builder_apply.go @@ -26,6 +26,11 @@ type ApplyGraphBuilder struct { // State is the current state State *states.State + // RootVariableValues are the root module input variables captured as + // part of the plan object, which we must reproduce in the apply step + // to get a consistent result. + RootVariableValues InputValues + // Plugins is a library of the plug-in components (providers and // provisioners) available for use. Plugins *contextPlugins @@ -88,7 +93,7 @@ func (b *ApplyGraphBuilder) Steps() []GraphTransformer { }, // Add dynamic values - &RootVariableTransformer{Config: b.Config}, + &RootVariableTransformer{Config: b.Config, RawValues: b.RootVariableValues}, &ModuleVariableTransformer{Config: b.Config}, &LocalTransformer{Config: b.Config}, &OutputTransformer{Config: b.Config, Changes: b.Changes}, diff --git a/internal/terraform/graph_builder_destroy_plan.go b/internal/terraform/graph_builder_destroy_plan.go index 0bac6305e..def1aa373 100644 --- a/internal/terraform/graph_builder_destroy_plan.go +++ b/internal/terraform/graph_builder_destroy_plan.go @@ -23,6 +23,11 @@ type DestroyPlanGraphBuilder struct { // State is the current state State *states.State + // RootVariableValues are the raw input values for root input variables + // given by the caller, which we'll resolve into final values as part + // of the plan walk. + RootVariableValues InputValues + // Plugins is a library of plug-in components (providers and // provisioners) available for use. 
Plugins *contextPlugins diff --git a/internal/terraform/graph_builder_eval.go b/internal/terraform/graph_builder_eval.go index ee9d6b8e8..78031e21f 100644 --- a/internal/terraform/graph_builder_eval.go +++ b/internal/terraform/graph_builder_eval.go @@ -30,6 +30,11 @@ type EvalGraphBuilder struct { // State is the current state State *states.State + // RootVariableValues are the raw input values for root input variables + // given by the caller, which we'll resolve into final values as part + // of the plan walk. + RootVariableValues InputValues + // Plugins is a library of plug-in components (providers and // provisioners) available for use. Plugins *contextPlugins @@ -60,7 +65,7 @@ func (b *EvalGraphBuilder) Steps() []GraphTransformer { }, // Add dynamic values - &RootVariableTransformer{Config: b.Config}, + &RootVariableTransformer{Config: b.Config, RawValues: b.RootVariableValues}, &ModuleVariableTransformer{Config: b.Config}, &LocalTransformer{Config: b.Config}, &OutputTransformer{Config: b.Config}, diff --git a/internal/terraform/graph_builder_import.go b/internal/terraform/graph_builder_import.go index 9910354cf..d8d609eba 100644 --- a/internal/terraform/graph_builder_import.go +++ b/internal/terraform/graph_builder_import.go @@ -17,6 +17,11 @@ type ImportGraphBuilder struct { // Module is a configuration to build the graph from. See ImportOpts.Config. Config *configs.Config + // RootVariableValues are the raw input values for root input variables + // given by the caller, which we'll resolve into final values as part + // of the plan walk. + RootVariableValues InputValues + // Plugins is a library of plug-in components (providers and // provisioners) available for use. 
Plugins *contextPlugins @@ -53,7 +58,7 @@ func (b *ImportGraphBuilder) Steps() []GraphTransformer { &ConfigTransformer{Config: config}, // Add dynamic values - &RootVariableTransformer{Config: b.Config}, + &RootVariableTransformer{Config: b.Config, RawValues: b.RootVariableValues}, &ModuleVariableTransformer{Config: b.Config}, &LocalTransformer{Config: b.Config}, &OutputTransformer{Config: b.Config}, diff --git a/internal/terraform/graph_builder_plan.go b/internal/terraform/graph_builder_plan.go index 709b917b6..1b8ce5833 100644 --- a/internal/terraform/graph_builder_plan.go +++ b/internal/terraform/graph_builder_plan.go @@ -28,6 +28,11 @@ type PlanGraphBuilder struct { // State is the current state State *states.State + // RootVariableValues are the raw input values for root input variables + // given by the caller, which we'll resolve into final values as part + // of the plan walk. + RootVariableValues InputValues + // Plugins is a library of plug-in components (providers and // provisioners) available for use. 
Plugins *contextPlugins @@ -95,7 +100,7 @@ func (b *PlanGraphBuilder) Steps() []GraphTransformer { }, // Add dynamic values - &RootVariableTransformer{Config: b.Config}, + &RootVariableTransformer{Config: b.Config, RawValues: b.RootVariableValues}, &ModuleVariableTransformer{Config: b.Config}, &LocalTransformer{Config: b.Config}, &OutputTransformer{Config: b.Config}, diff --git a/internal/terraform/node_module_variable.go b/internal/terraform/node_module_variable.go index 9f15587ec..321cd8748 100644 --- a/internal/terraform/node_module_variable.go +++ b/internal/terraform/node_module_variable.go @@ -12,7 +12,6 @@ import ( "github.com/hashicorp/terraform/internal/lang" "github.com/hashicorp/terraform/internal/tfdiags" "github.com/zclconf/go-cty/cty" - "github.com/zclconf/go-cty/cty/convert" ) // nodeExpandModuleVariable is the placeholder for an variable that has not yet had @@ -143,35 +142,27 @@ func (n *nodeModuleVariable) ModulePath() addrs.Module { // GraphNodeExecutable func (n *nodeModuleVariable) Execute(ctx EvalContext, op walkOperation) (diags tfdiags.Diagnostics) { - // If we have no value, do nothing - if n.Expr == nil { - return nil - } + log.Printf("[TRACE] nodeModuleVariable: evaluating %s", n.Addr) - // Otherwise, interpolate the value of this variable and set it - // within the variables mapping. - var vals map[string]cty.Value + var val cty.Value var err error switch op { case walkValidate: - vals, err = n.evalModuleCallArgument(ctx, true) + val, err = n.evalModuleCallArgument(ctx, true) diags = diags.Append(err) - if diags.HasErrors() { - return diags - } default: - vals, err = n.evalModuleCallArgument(ctx, false) + val, err = n.evalModuleCallArgument(ctx, false) diags = diags.Append(err) - if diags.HasErrors() { - return diags - } + } + if diags.HasErrors() { + return diags } // Set values for arguments of a child module call, for later retrieval // during expression evaluation. 
_, call := n.Addr.Module.CallInstance() - ctx.SetModuleCallArguments(call, vals) + ctx.SetModuleCallArgument(call, n.Addr.Variable, val) return evalVariableValidations(n.Addr, n.Config, n.Expr, ctx) } @@ -199,77 +190,45 @@ func (n *nodeModuleVariable) DotNode(name string, opts *dag.DotOpts) *dag.DotNod // validateOnly indicates that this evaluation is only for config // validation, and we will not have any expansion module instance // repetition data. -func (n *nodeModuleVariable) evalModuleCallArgument(ctx EvalContext, validateOnly bool) (map[string]cty.Value, error) { - name := n.Addr.Variable.Name - expr := n.Expr +func (n *nodeModuleVariable) evalModuleCallArgument(ctx EvalContext, validateOnly bool) (cty.Value, error) { + var diags tfdiags.Diagnostics + var givenVal cty.Value + var errSourceRange tfdiags.SourceRange + if expr := n.Expr; expr != nil { + var moduleInstanceRepetitionData instances.RepetitionData - if expr == nil { - // Should never happen, but we'll bail out early here rather than - // crash in case it does. We set no value at all in this case, - // making a subsequent call to EvalContext.SetModuleCallArguments - // a no-op. - log.Printf("[ERROR] attempt to evaluate %s with nil expression", n.Addr.String()) - return nil, nil - } + switch { + case validateOnly: + // the instance expander does not track unknown expansion values, so we + // have to assume all RepetitionData is unknown. + moduleInstanceRepetitionData = instances.RepetitionData{ + CountIndex: cty.UnknownVal(cty.Number), + EachKey: cty.UnknownVal(cty.String), + EachValue: cty.DynamicVal, + } - var moduleInstanceRepetitionData instances.RepetitionData - - switch { - case validateOnly: - // the instance expander does not track unknown expansion values, so we - // have to assume all RepetitionData is unknown. 
- moduleInstanceRepetitionData = instances.RepetitionData{ - CountIndex: cty.UnknownVal(cty.Number), - EachKey: cty.UnknownVal(cty.String), - EachValue: cty.DynamicVal, + default: + // Get the repetition data for this module instance, + // so we can create the appropriate scope for evaluating our expression + moduleInstanceRepetitionData = ctx.InstanceExpander().GetModuleInstanceRepetitionData(n.ModuleInstance) } - default: - // Get the repetition data for this module instance, - // so we can create the appropriate scope for evaluating our expression - moduleInstanceRepetitionData = ctx.InstanceExpander().GetModuleInstanceRepetitionData(n.ModuleInstance) + scope := ctx.EvaluationScope(nil, moduleInstanceRepetitionData) + val, moreDiags := scope.EvalExpr(expr, cty.DynamicPseudoType) + diags = diags.Append(moreDiags) + if moreDiags.HasErrors() { + return cty.DynamicVal, diags.ErrWithWarnings() + } + givenVal = val + errSourceRange = tfdiags.SourceRangeFromHCL(expr.Range()) + } else { + // We'll use cty.NilVal to represent the variable not being set at all. + givenVal = cty.NilVal + errSourceRange = tfdiags.SourceRangeFromHCL(n.Config.DeclRange) // we use the declaration range as a fallback for an undefined variable } - scope := ctx.EvaluationScope(nil, moduleInstanceRepetitionData) - val, diags := scope.EvalExpr(expr, cty.DynamicPseudoType) + finalVal, moreDiags := prepareFinalInputVariableValue(n.Addr, givenVal, errSourceRange, n.Config) + diags = diags.Append(moreDiags) - // We intentionally passed DynamicPseudoType to EvalExpr above because - // now we can do our own local type conversion and produce an error message - // with better context if it fails. 
- var convErr error - val, convErr = convert.Convert(val, n.Config.ConstraintType) - if convErr != nil { - diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Invalid value for module argument", - Detail: fmt.Sprintf( - "The given value is not suitable for child module variable %q defined at %s: %s.", - name, n.Config.DeclRange.String(), convErr, - ), - Subject: expr.Range().Ptr(), - }) - // We'll return a placeholder unknown value to avoid producing - // redundant downstream errors. - val = cty.UnknownVal(n.Config.Type) - } - - // If there is no default, we have to ensure that a null value is allowed - // for this variable. - if n.Config.Default == cty.NilVal && !n.Config.Nullable && val.IsNull() { - // The value cannot be null, and there is no configured default. - diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: `Invalid variable value`, - Detail: fmt.Sprintf(`The variable %q is required, but the given value is null.`, n.Addr), - Subject: &n.Config.DeclRange, - }) - // Stub out our return value so that the semantic checker doesn't - // produce redundant downstream errors. - val = cty.UnknownVal(n.Config.Type) - } - - vals := make(map[string]cty.Value) - vals[name] = val - - return vals, diags.ErrWithWarnings() + return finalVal, diags.ErrWithWarnings() } diff --git a/internal/terraform/node_root_variable.go b/internal/terraform/node_root_variable.go index 56ee5149a..d023be350 100644 --- a/internal/terraform/node_root_variable.go +++ b/internal/terraform/node_root_variable.go @@ -1,16 +1,26 @@ package terraform import ( + "log" + "github.com/hashicorp/terraform/internal/addrs" "github.com/hashicorp/terraform/internal/configs" "github.com/hashicorp/terraform/internal/dag" "github.com/hashicorp/terraform/internal/tfdiags" + "github.com/zclconf/go-cty/cty" ) // NodeRootVariable represents a root variable input. 
type NodeRootVariable struct { Addr addrs.InputVariable Config *configs.Variable + + // RawValue is the value for the variable set from outside Terraform + // Core, such as on the command line, or from an environment variable, + // or similar. This is the raw value that was provided, not yet + // converted or validated, and can be nil for a variable that isn't + // set at all. + RawValue *InputValue } var ( @@ -38,21 +48,56 @@ func (n *NodeRootVariable) ReferenceableAddrs() []addrs.Referenceable { // GraphNodeExecutable func (n *NodeRootVariable) Execute(ctx EvalContext, op walkOperation) tfdiags.Diagnostics { - // We don't actually need to _evaluate_ a root module variable, because - // its value is always constant and already stashed away in our EvalContext. - // However, we might need to run some user-defined validation rules against - // the value. + // Root module variables are special in that they are provided directly + // by the caller (usually, the CLI layer) and so we don't really need to + // evaluate them in the usual sense, but we do need to process the raw + // values given by the caller to match what the module is expecting, and + // make sure the values are valid. + var diags tfdiags.Diagnostics - if n.Config == nil || len(n.Config.Validations) == 0 { - return nil // nothing to do + addr := addrs.RootModuleInstance.InputVariable(n.Addr.Name) + log.Printf("[TRACE] NodeRootVariable: evaluating %s", addr) + + if n.Config == nil { + // Because we build NodeRootVariable from configuration in the normal + // case it's strange to get here, but we tolerate it to allow for + // tests that might not populate the inputs fully. + return nil } - return evalVariableValidations( + var givenVal cty.Value + if n.RawValue != nil { + givenVal = n.RawValue.Value + } else { + // We'll use cty.NilVal to represent the variable not being set at + // all, which for historical reasons is unfortunately different than + // explicitly setting it to null in some cases. 
+ givenVal = cty.NilVal + } + + finalVal, moreDiags := prepareFinalInputVariableValue( + addr, + givenVal, + tfdiags.SourceRangeFromHCL(n.Config.DeclRange), + n.Config, + ) + diags = diags.Append(moreDiags) + if moreDiags.HasErrors() { + // No point in proceeding to validations then, because they'll + // probably fail trying to work with a value of the wrong type. + return diags + } + + ctx.SetRootModuleArgument(addr.Variable, finalVal) + + moreDiags = evalVariableValidations( addrs.RootModuleInstance.InputVariable(n.Addr.Name), n.Config, nil, // not set for root module variables ctx, ) + diags = diags.Append(moreDiags) + return diags } // dag.GraphNodeDotter impl. diff --git a/internal/terraform/node_root_variable_test.go b/internal/terraform/node_root_variable_test.go index bd3d9c2d6..aecb7428a 100644 --- a/internal/terraform/node_root_variable_test.go +++ b/internal/terraform/node_root_variable_test.go @@ -3,26 +3,164 @@ package terraform import ( "testing" + "github.com/hashicorp/hcl/v2" + "github.com/zclconf/go-cty/cty" + "github.com/hashicorp/terraform/internal/addrs" "github.com/hashicorp/terraform/internal/configs" - "github.com/zclconf/go-cty/cty" + "github.com/hashicorp/terraform/internal/lang" ) func TestNodeRootVariableExecute(t *testing.T) { - ctx := new(MockEvalContext) + t.Run("type conversion", func(t *testing.T) { + ctx := new(MockEvalContext) - n := &NodeRootVariable{ - Addr: addrs.InputVariable{Name: "foo"}, - Config: &configs.Variable{ - Name: "foo", - Type: cty.String, - ConstraintType: cty.String, - }, - } + n := &NodeRootVariable{ + Addr: addrs.InputVariable{Name: "foo"}, + Config: &configs.Variable{ + Name: "foo", + Type: cty.String, + ConstraintType: cty.String, + }, + RawValue: &InputValue{ + Value: cty.True, + SourceType: ValueFromUnknown, + }, + } - diags := n.Execute(ctx, walkApply) - if diags.HasErrors() { - t.Fatalf("unexpected error: %s", diags.Err()) - } + diags := n.Execute(ctx, walkApply) + if diags.HasErrors() { + 
t.Fatalf("unexpected error: %s", diags.Err()) + } + if !ctx.SetRootModuleArgumentCalled { + t.Fatalf("ctx.SetRootModuleArgument wasn't called") + } + if got, want := ctx.SetRootModuleArgumentAddr.String(), "var.foo"; got != want { + t.Errorf("wrong address for ctx.SetRootModuleArgument\ngot: %s\nwant: %s", got, want) + } + if got, want := ctx.SetRootModuleArgumentValue, cty.StringVal("true"); !want.RawEquals(got) { + // NOTE: The given value was cty.Bool but the type constraint was + // cty.String, so it was NodeRootVariable's responsibility to convert + // as part of preparing the "final value". + t.Errorf("wrong value for ctx.SetRootModuleArgument\ngot: %#v\nwant: %#v", got, want) + } + }) + t.Run("validation", func(t *testing.T) { + ctx := new(MockEvalContext) + + // The variable validation function gets called with Terraform's + // built-in functions available, so we need a minimal scope just for + // it to get the functions from. + ctx.EvaluationScopeScope = &lang.Scope{} + + // We need to reimplement a _little_ bit of EvalContextBuiltin logic + // here to get a similar effect with EvalContextMock just to get the + // value to flow through here in a realistic way that'll make this test + // useful. 
+ var finalVal cty.Value + ctx.SetRootModuleArgumentFunc = func(addr addrs.InputVariable, v cty.Value) { + if addr.Name == "foo" { + t.Logf("set %s to %#v", addr.String(), v) + finalVal = v + } + } + ctx.GetVariableValueFunc = func(addr addrs.AbsInputVariableInstance) cty.Value { + if addr.String() != "var.foo" { + return cty.NilVal + } + t.Logf("reading final val for %s (%#v)", addr.String(), finalVal) + return finalVal + } + + n := &NodeRootVariable{ + Addr: addrs.InputVariable{Name: "foo"}, + Config: &configs.Variable{ + Name: "foo", + Type: cty.Number, + ConstraintType: cty.Number, + Validations: []*configs.VariableValidation{ + { + Condition: fakeHCLExpressionFunc(func(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { + // This returns true only if the given variable value + // is exactly cty.Number, which allows us to verify + // that we were given the value _after_ type + // conversion. + // This had previously not been handled correctly, + // as reported in: + // https://github.com/hashicorp/terraform/issues/29899 + vars := ctx.Variables["var"] + if vars == cty.NilVal || !vars.Type().IsObjectType() || !vars.Type().HasAttribute("foo") { + t.Logf("var.foo isn't available") + return cty.False, nil + } + val := vars.GetAttr("foo") + if val == cty.NilVal || val.Type() != cty.Number { + t.Logf("var.foo is %#v; want a number", val) + return cty.False, nil + } + return cty.True, nil + }), + ErrorMessage: "Must be a number.", + }, + }, + }, + RawValue: &InputValue{ + // Note: This is a string, but the variable's type constraint + // is number so it should be converted before use. 
+ Value: cty.StringVal("5"), + SourceType: ValueFromUnknown, + }, + } + + diags := n.Execute(ctx, walkApply) + if diags.HasErrors() { + t.Fatalf("unexpected error: %s", diags.Err()) + } + + if !ctx.SetRootModuleArgumentCalled { + t.Fatalf("ctx.SetRootModuleArgument wasn't called") + } + if got, want := ctx.SetRootModuleArgumentAddr.String(), "var.foo"; got != want { + t.Errorf("wrong address for ctx.SetRootModuleArgument\ngot: %s\nwant: %s", got, want) + } + if got, want := ctx.SetRootModuleArgumentValue, cty.NumberIntVal(5); !want.RawEquals(got) { + // NOTE: The given value was cty.Bool but the type constraint was + // cty.String, so it was NodeRootVariable's responsibility to convert + // as part of preparing the "final value". + t.Errorf("wrong value for ctx.SetRootModuleArgument\ngot: %#v\nwant: %#v", got, want) + } + }) +} + +// fakeHCLExpressionFunc is a fake implementation of hcl.Expression that just +// directly produces a value with direct Go code. +// +// An expression of this type has no references and so it cannot access any +// variables from the EvalContext unless something else arranges for them +// to be guaranteed available. For example, custom variable validations just +// unconditionally have access to the variable they are validating regardless +// of references. 
+type fakeHCLExpressionFunc func(*hcl.EvalContext) (cty.Value, hcl.Diagnostics) + +var _ hcl.Expression = fakeHCLExpressionFunc(nil) + +func (f fakeHCLExpressionFunc) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { + return f(ctx) +} + +func (f fakeHCLExpressionFunc) Variables() []hcl.Traversal { + return nil +} + +func (f fakeHCLExpressionFunc) Range() hcl.Range { + return hcl.Range{ + Filename: "fake", + Start: hcl.InitialPos, + End: hcl.InitialPos, + } +} + +func (f fakeHCLExpressionFunc) StartRange() hcl.Range { + return f.Range() } diff --git a/internal/terraform/transform_variable.go b/internal/terraform/transform_variable.go index 86bd6a981..4262ea3d6 100644 --- a/internal/terraform/transform_variable.go +++ b/internal/terraform/transform_variable.go @@ -13,6 +13,8 @@ import ( // reach them. type RootVariableTransformer struct { Config *configs.Config + + RawValues InputValues } func (t *RootVariableTransformer) Transform(g *Graph) error { @@ -31,7 +33,8 @@ func (t *RootVariableTransformer) Transform(g *Graph) error { Addr: addrs.InputVariable{ Name: v.Name, }, - Config: v, + Config: v, + RawValue: t.RawValues[v.Name], } g.Add(node) } From 36c4d4c241b3402d7fbf677e4e7850f6ab80e66f Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 20 Dec 2021 16:38:52 -0800 Subject: [PATCH 40/68] core and backend: remove redundant handling of default variable values Previously we had three different layers all thinking they were responsible for substituting a default value for an unset root module variable: - the local backend, via logic in backend.ParseVariableValues - the context.Plan function (and other similar functions) trying to preprocess the input variables using terraform.mergeDefaultInputVariableValues . - the newer prepareFinalInputVariableValue, which aims to centralize all of the variable preparation logic so it can be common to both root and child module variables. 
The second of these was also trying to handle type constraint checking, which is also the responsibility of the central function and not something we need to handle so early. Only the last of these consistently handles both root and child module variables, and so is the one we ought to keep. The others are now redundant and are causing prepareFinalInputVariableValue to get a slightly corrupted view of the caller's chosen variable values. To rectify that, here we remove the two redundant layers altogether and have unset root variables pass through as cty.NilVal all the way to the central prepareFinalInputVariableValue function, which will then handle them in a suitable way which properly respects the "nullable" setting. This commit includes some test changes in the terraform package to make those tests no longer rely on the mergeDefaultInputVariableValues logic we've removed, and to instead explicitly set cty.NilVal for all unset variables to comply with our intended contract for PlanOpts.SetVariables, and similar. (This is so that we can more easily catch bugs in callers where they _don't_ correctly handle input variables; it allows us to distinguish between the caller explicitly marking a variable as unset vs. not describing it at all, where the latter is a bug in the caller.) 
--- internal/backend/unparsed_value.go | 17 +- internal/backend/unparsed_value_test.go | 2 +- internal/command/jsonplan/plan.go | 43 +++++- internal/terraform/context_apply.go | 15 ++ internal/terraform/context_apply2_test.go | 6 +- internal/terraform/context_apply_test.go | 59 ++++--- internal/terraform/context_eval.go | 2 +- internal/terraform/context_eval_test.go | 4 +- internal/terraform/context_import.go | 2 +- internal/terraform/context_plan.go | 69 ++++++++- internal/terraform/context_plan2_test.go | 2 +- internal/terraform/context_plan_test.go | 15 +- internal/terraform/eval_variable.go | 10 +- internal/terraform/variables.go | 135 ++++++---------- internal/terraform/variables_test.go | 180 +++------------------- 15 files changed, 255 insertions(+), 306 deletions(-) diff --git a/internal/backend/unparsed_value.go b/internal/backend/unparsed_value.go index 91c982582..e7eadea9a 100644 --- a/internal/backend/unparsed_value.go +++ b/internal/backend/unparsed_value.go @@ -164,13 +164,18 @@ func ParseVariableValues(vv map[string]UnparsedVariableValue, decls map[string]* // By this point we should've gathered all of the required root module // variables from one of the many possible sources. We'll now populate - // any we haven't gathered as their defaults and fail if any of the - // missing ones are required. + // any we haven't gathered as unset placeholders which Terraform Core + // can then react to. for name, vc := range decls { if isDefinedAny(name, ret, undeclared) { continue } + // This check is redundant with a check made in Terraform Core when + // processing undeclared variables, but allows us to generate a more + // specific error message which mentions -var and -var-file command + // line options, whereas the one in Terraform Core is more general + // due to supporting both root and child module variables. 
if vc.Required() { diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, @@ -189,8 +194,14 @@ func ParseVariableValues(vv map[string]UnparsedVariableValue, decls map[string]* SourceRange: tfdiags.SourceRangeFromHCL(vc.DeclRange), } } else { + // We're still required to put an entry for this variable + // in the mapping to be explicit to Terraform Core that we + // visited it, but its value will be cty.NilVal to represent + // that it wasn't set at all at this layer, and so Terraform Core + // should substitute a default if available, or generate an error + // if not. ret[name] = &terraform.InputValue{ - Value: vc.Default, + Value: cty.NilVal, SourceType: terraform.ValueFromConfig, SourceRange: tfdiags.SourceRangeFromHCL(vc.DeclRange), } diff --git a/internal/backend/unparsed_value_test.go b/internal/backend/unparsed_value_test.go index 981c84a43..8807d243d 100644 --- a/internal/backend/unparsed_value_test.go +++ b/internal/backend/unparsed_value_test.go @@ -204,7 +204,7 @@ func TestUnparsedValue(t *testing.T) { }, }, "missing2": { - Value: cty.StringVal("default for missing2"), + Value: cty.NilVal, // Terraform Core handles substituting the default SourceType: terraform.ValueFromConfig, SourceRange: tfdiags.SourceRange{ Filename: "fake.tf", diff --git a/internal/command/jsonplan/plan.go b/internal/command/jsonplan/plan.go index 64d77c05a..06ed97961 100644 --- a/internal/command/jsonplan/plan.go +++ b/internal/command/jsonplan/plan.go @@ -118,7 +118,7 @@ func Marshal( output := newPlan() output.TerraformVersion = version.String() - err := output.marshalPlanVariables(p.VariableValues, schemas) + err := output.marshalPlanVariables(p.VariableValues, config.Module.Variables) if err != nil { return nil, fmt.Errorf("error in marshalPlanVariables: %s", err) } @@ -183,11 +183,7 @@ func Marshal( return ret, err } -func (p *plan) marshalPlanVariables(vars map[string]plans.DynamicValue, schemas *terraform.Schemas) error { - if len(vars) == 0 { - return nil - } - +func 
(p *plan) marshalPlanVariables(vars map[string]plans.DynamicValue, decls map[string]*configs.Variable) error { p.Variables = make(variables, len(vars)) for k, v := range vars { @@ -203,6 +199,41 @@ func (p *plan) marshalPlanVariables(vars map[string]plans.DynamicValue, schemas Value: valJSON, } } + + // In Terraform v1.1 and earlier we had some confusion about which subsystem + // of Terraform was the one responsible for substituting in default values + // for unset module variables, with root module variables being handled in + // three different places while child module variables were only handled + // during the Terraform Core graph walk. + // + // For Terraform v1.2 and later we rationalized that by having the Terraform + // Core graph walk always be responsible for selecting defaults regardless + // of root vs. child module, but unfortunately our earlier accidental + // misbehavior bled out into the public interface by making the defaults + // show up in the "vars" map to this function. Those are now correctly + // omitted (so that the plan file only records the variables _actually_ + // set by the caller) but consumers of the JSON plan format may be depending + // on our old behavior and so we'll fake it here just in time so that + // outside consumers won't see a behavior change. 
+ for name, decl := range decls { + if _, ok := p.Variables[name]; ok { + continue + } + if val := decl.Default; val != cty.NilVal { + valJSON, err := ctyjson.Marshal(val, val.Type()) + if err != nil { + return err + } + p.Variables[name] = &variable{ + Value: valJSON, + } + } + } + + if len(p.Variables) == 0 { + p.Variables = nil // omit this property if there are no variables to describe + } + return nil } diff --git a/internal/terraform/context_apply.go b/internal/terraform/context_apply.go index 42520b03d..d3cd9dc2d 100644 --- a/internal/terraform/context_apply.go +++ b/internal/terraform/context_apply.go @@ -87,6 +87,21 @@ func (c *Context) applyGraph(plan *plans.Plan, config *configs.Config, validate return nil, walkApply, diags } + // The plan.VariableValues field only records variables that were actually + // set by the caller in the PlanOpts, so we may need to provide + // placeholders for any other variables that the user didn't set, in + // which case Terraform will once again use the default value from the + // configuration when we visit these variables during the graph walk. 
+ for name := range config.Module.Variables { + if _, ok := variables[name]; ok { + continue + } + variables[name] = &InputValue{ + Value: cty.NilVal, + SourceType: ValueFromPlan, + } + } + graph, moreDiags := (&ApplyGraphBuilder{ Config: config, Changes: plan.Changes, diff --git a/internal/terraform/context_apply2_test.go b/internal/terraform/context_apply2_test.go index 6b87d409d..512f3dab3 100644 --- a/internal/terraform/context_apply2_test.go +++ b/internal/terraform/context_apply2_test.go @@ -426,7 +426,7 @@ resource "test_resource" "b" { }, }) - plan, diags := ctx.Plan(m, state, DefaultPlanOpts) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) _, diags = ctx.Apply(plan, m) @@ -530,14 +530,14 @@ resource "test_object" "y" { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) assertNoErrors(t, diags) // FINAL PLAN: - plan, diags = ctx.Plan(m, state, DefaultPlanOpts) + plan, diags = ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) // make sure the same marks are compared in the next plan as well diff --git a/internal/terraform/context_apply_test.go b/internal/terraform/context_apply_test.go index 28c5c788b..46dcbd58b 100644 --- a/internal/terraform/context_apply_test.go +++ b/internal/terraform/context_apply_test.go @@ -517,7 +517,7 @@ func TestContext2Apply_mapVarBetweenModules(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) @@ -2262,7 +2262,7 @@ func TestContext2Apply_countVariable(t 
*testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) @@ -2288,7 +2288,7 @@ func TestContext2Apply_countVariableRef(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) @@ -2327,7 +2327,7 @@ func TestContext2Apply_provisionerInterpCount(t *testing.T) { Provisioners: provisioners, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) // We'll marshal and unmarshal the plan here, to ensure that we have @@ -3682,7 +3682,7 @@ func TestContext2Apply_multiVarOrder(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) @@ -3713,7 +3713,7 @@ func TestContext2Apply_multiVarOrderInterp(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) @@ -4704,9 +4704,7 @@ func TestContext2Apply_provisionerDestroy(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, state, &PlanOpts{ - Mode: plans.DestroyMode, - }) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.DestroyMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags = ctx.Apply(plan, 
m) @@ -4753,9 +4751,7 @@ func TestContext2Apply_provisionerDestroyFail(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, state, &PlanOpts{ - Mode: plans.DestroyMode, - }) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.DestroyMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags = ctx.Apply(plan, m) @@ -5908,7 +5904,7 @@ func TestContext2Apply_destroyWithModuleVariableAndCountNested(t *testing.T) { }) // First plan and apply a create operation - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags = ctx.Apply(plan, m) @@ -5929,9 +5925,7 @@ func TestContext2Apply_destroyWithModuleVariableAndCountNested(t *testing.T) { }) // First plan and apply a create operation - plan, diags := ctx.Plan(m, state, &PlanOpts{ - Mode: plans.DestroyMode, - }) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.DestroyMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("destroy plan err: %s", diags.Err()) } @@ -7561,6 +7555,12 @@ func TestContext2Apply_vars(t *testing.T) { Value: cty.StringVal("us-east-1"), SourceType: ValueFromCaller, }, + "bar": &InputValue{ + // This one is not explicitly set but that's okay because it + // has a declared default, which Terraform Core will use instead. 
+ Value: cty.NilVal, + SourceType: ValueFromCaller, + }, "test_list": &InputValue{ Value: cty.ListVal([]cty.Value{ cty.StringVal("Hello"), @@ -7876,7 +7876,7 @@ func TestContext2Apply_issue7824(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("err: %s", diags.Err()) } @@ -7932,7 +7932,7 @@ func TestContext2Apply_issue5254(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("err: %s", diags.Err()) } @@ -7951,7 +7951,7 @@ func TestContext2Apply_issue5254(t *testing.T) { }, }) - plan, diags = ctx.Plan(m, state, DefaultPlanOpts) + plan, diags = ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("err: %s", diags.Err()) } @@ -8845,7 +8845,7 @@ func TestContext2Apply_plannedInterpolatedCount(t *testing.T) { Providers: Providers, }) - plan, diags := ctx.Plan(m, state, DefaultPlanOpts) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("plan failed: %s", diags.Err()) } @@ -8904,9 +8904,7 @@ func TestContext2Apply_plannedDestroyInterpolatedCount(t *testing.T) { Providers: providers, }) - plan, diags := ctx.Plan(m, state, &PlanOpts{ - Mode: plans.DestroyMode, - }) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.DestroyMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("plan failed: %s", diags.Err()) } @@ -9674,7 +9672,7 @@ func TestContext2Apply_plannedConnectionRefs(t *testing.T) { Hooks: []Hook{hook}, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := 
ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) diags.HasErrors() if diags.HasErrors() { t.Fatalf("diags: %s", diags.Err()) @@ -11687,7 +11685,7 @@ resource "test_resource" "foo" { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags := ctx.Apply(plan, m) @@ -11702,7 +11700,7 @@ resource "test_resource" "foo" { }, }) - plan, diags = ctx.Plan(m, state, DefaultPlanOpts) + plan, diags = ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) state, diags = ctx.Apply(plan, m) @@ -11720,6 +11718,7 @@ resource "test_resource" "foo" { plan, diags = ctx.Plan(m, state, &PlanOpts{ Mode: plans.NormalMode, SetVariables: InputValues{ + "sensitive_id": &InputValue{Value: cty.NilVal}, "sensitive_var": &InputValue{ Value: cty.StringVal("bar"), }, @@ -11759,7 +11758,7 @@ resource "test_resource" "foo" { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("plan errors: %s", diags.Err()) } @@ -11904,7 +11903,7 @@ resource "test_resource" "foo" { ) }) - plan, diags := ctx.Plan(m, state, DefaultPlanOpts) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) addr := mustResourceInstanceAddr("test_resource.foo") @@ -11954,7 +11953,7 @@ resource "test_resource" "foo" { // but this seems rather suspicious and we should ideally figure out what // this test was originally intending to do and make it do that. 
oldPlan := plan - _, diags = ctx2.Plan(m2, state, DefaultPlanOpts) + _, diags = ctx2.Plan(m2, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) stateWithoutSensitive, diags := ctx.Apply(oldPlan, m) assertNoErrors(t, diags) @@ -12206,7 +12205,7 @@ func TestContext2Apply_dataSensitive(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("diags: %s", diags.Err()) } else { diff --git a/internal/terraform/context_eval.go b/internal/terraform/context_eval.go index c6af77635..f9d0f6493 100644 --- a/internal/terraform/context_eval.go +++ b/internal/terraform/context_eval.go @@ -45,7 +45,7 @@ func (c *Context) Eval(config *configs.Config, state *states.State, moduleAddr a state = state.DeepCopy() var walker *ContextGraphWalker - variables := mergeDefaultInputVariableValues(opts.SetVariables, config.Module.Variables) + variables := opts.SetVariables // By the time we get here, we should have values defined for all of // the root module variables, even if some of them are "unknown". 
It's the diff --git a/internal/terraform/context_eval_test.go b/internal/terraform/context_eval_test.go index dff687983..0bd752935 100644 --- a/internal/terraform/context_eval_test.go +++ b/internal/terraform/context_eval_test.go @@ -54,7 +54,9 @@ func TestContextEval(t *testing.T) { }, }) - scope, diags := ctx.Eval(m, states.NewState(), addrs.RootModuleInstance, &EvalOpts{}) + scope, diags := ctx.Eval(m, states.NewState(), addrs.RootModuleInstance, &EvalOpts{ + SetVariables: testInputValuesUnset(m.Module.Variables), + }) if diags.HasErrors() { t.Fatalf("Eval errors: %s", diags.Err()) } diff --git a/internal/terraform/context_import.go b/internal/terraform/context_import.go index b5417405f..d809d6bb9 100644 --- a/internal/terraform/context_import.go +++ b/internal/terraform/context_import.go @@ -53,7 +53,7 @@ func (c *Context) Import(config *configs.Config, prevRunState *states.State, opt log.Printf("[DEBUG] Building and walking import graph") - variables := mergeDefaultInputVariableValues(opts.SetVariables, config.Module.Variables) + variables := opts.SetVariables // Initialize our graph builder builder := &ImportGraphBuilder{ diff --git a/internal/terraform/context_plan.go b/internal/terraform/context_plan.go index 0b3c97f14..f114b2a5f 100644 --- a/internal/terraform/context_plan.go +++ b/internal/terraform/context_plan.go @@ -21,10 +21,42 @@ import ( // PlanOpts are the various options that affect the details of how Terraform // will build a plan. type PlanOpts struct { - Mode plans.Mode - SkipRefresh bool + // Mode defines what variety of plan the caller wishes to create. + // Refer to the documentation of the plans.Mode type and its values + // for more information. + Mode plans.Mode + + // SkipRefresh specifies to trust that the current values for managed + // resource instances in the prior state are accurate and to therefore + // disable the usual step of fetching updated values for each resource + // instance using its corresponding provider. 
+ SkipRefresh bool + + // SetVariables are the raw values for root module variables as provided + // by the user who is requesting the run, prior to any normalization or + // substitution of defaults. See the documentation for the InputValue + // type for more information on how to correctly populate this. SetVariables InputValues - Targets []addrs.Targetable + + // If Targets has a non-zero length then it activates targeted planning + // mode, where Terraform will take actions only for resource instances + // mentioned in this set and any other objects those resource instances + // depend on. + // + // Targeted planning mode is intended for exceptional use only, + // and so populating this field will cause Terraform to generate extra + // warnings as part of the planning result. + Targets []addrs.Targetable + + // ForceReplace is a set of resource instance addresses whose corresponding + // objects should be forced planned for replacement if the provider's + // plan would otherwise have been to either update the object in-place or + // to take no action on it at all. + // + // A typical use of this argument is to ask Terraform to replace an object + // which the user has determined is somehow degraded (via information from + // outside of Terraform), thereby hopefully replacing it with a + // fully-functional new object. ForceReplace []addrs.AbsResourceInstance } @@ -99,8 +131,6 @@ func (c *Context) Plan(config *configs.Config, prevRunState *states.State, opts return nil, diags } - variables := mergeDefaultInputVariableValues(opts.SetVariables, config.Module.Variables) - // By the time we get here, we should have values defined for all of // the root module variables, even if some of them are "unknown". 
It's the // caller's responsibility to have already handled the decoding of these @@ -108,7 +138,7 @@ func (c *Context) Plan(config *configs.Config, prevRunState *states.State, opts // user-friendly error messages if they are not all present, and so // the error message from checkInputVariables should never be seen and // includes language asking the user to report a bug. - varDiags := checkInputVariables(config.Module.Variables, variables) + varDiags := checkInputVariables(config.Module.Variables, opts.SetVariables) diags = diags.Append(varDiags) if len(opts.Targets) > 0 { @@ -139,8 +169,12 @@ The -target option is not for routine use, and is provided only for exceptional } // convert the variables into the format expected for the plan - varVals := make(map[string]plans.DynamicValue, len(variables)) - for k, iv := range variables { + varVals := make(map[string]plans.DynamicValue, len(opts.SetVariables)) + for k, iv := range opts.SetVariables { + if iv.Value == cty.NilVal { + continue // We only record values that the caller actually set + } + // We use cty.DynamicPseudoType here so that we'll save both the // value _and_ its dynamic type in the plan, so we can recover // exactly the same value later. @@ -172,6 +206,25 @@ var DefaultPlanOpts = &PlanOpts{ Mode: plans.NormalMode, } +// SimplePlanOpts is a constructor to help with creating "simple" values of +// PlanOpts which only specify a mode and input variables. +// +// This helper function is primarily intended for use in straightforward +// tests that don't need any of the more "esoteric" planning options. For +// handling real user requests to run Terraform, it'd probably be better +// to construct a *PlanOpts value directly and provide a way for the user +// to set values for all of its fields. +// +// The "mode" and "setVariables" arguments become the values of the "Mode" +// and "SetVariables" fields in the result. Refer to the PlanOpts type +// documentation to learn about the meanings of those fields. 
+func SimplePlanOpts(mode plans.Mode, setVariables InputValues) *PlanOpts { + return &PlanOpts{ + Mode: mode, + SetVariables: setVariables, + } +} + func (c *Context) plan(config *configs.Config, prevRunState *states.State, opts *PlanOpts) (*plans.Plan, tfdiags.Diagnostics) { var diags tfdiags.Diagnostics diff --git a/internal/terraform/context_plan2_test.go b/internal/terraform/context_plan2_test.go index 006e9e932..d1771b1f3 100644 --- a/internal/terraform/context_plan2_test.go +++ b/internal/terraform/context_plan2_test.go @@ -205,7 +205,7 @@ data "test_data_source" "foo" { }, }) - plan, diags := ctx.Plan(m, state, DefaultPlanOpts) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) assertNoErrors(t, diags) for _, res := range plan.Changes.Resources { diff --git a/internal/terraform/context_plan_test.go b/internal/terraform/context_plan_test.go index cfd51da8c..9cf7e875e 100644 --- a/internal/terraform/context_plan_test.go +++ b/internal/terraform/context_plan_test.go @@ -405,7 +405,7 @@ func TestContext2Plan_moduleExpand(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } @@ -1175,7 +1175,7 @@ func TestContext2Plan_moduleProviderVar(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } @@ -2242,7 +2242,7 @@ func TestContext2Plan_countModuleStatic(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, 
testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } @@ -2295,7 +2295,7 @@ func TestContext2Plan_countModuleStaticGrandchild(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } @@ -3938,7 +3938,7 @@ func TestContext2Plan_taintDestroyInterpolatedCountRace(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, state.DeepCopy(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, state.DeepCopy(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } @@ -5481,7 +5481,7 @@ func TestContext2Plan_variableSensitivity(t *testing.T) { }, }) - plan, diags := ctx.Plan(m, states.NewState(), DefaultPlanOpts) + plan, diags := ctx.Plan(m, states.NewState(), SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } @@ -5544,6 +5544,7 @@ func TestContext2Plan_variableSensitivityModule(t *testing.T) { plan, diags := ctx.Plan(m, states.NewState(), &PlanOpts{ Mode: plans.NormalMode, SetVariables: InputValues{ + "sensitive_var": {Value: cty.NilVal}, "another_var": &InputValue{ Value: cty.StringVal("boop"), SourceType: ValueFromCaller, @@ -6657,7 +6658,7 @@ resource "test_resource" "foo" { }, ) }) - plan, diags := ctx.Plan(m, state, DefaultPlanOpts) + plan, diags := ctx.Plan(m, state, SimplePlanOpts(plans.NormalMode, testInputValuesUnset(m.Module.Variables))) if diags.HasErrors() { t.Fatal(diags.Err()) } diff --git a/internal/terraform/eval_variable.go b/internal/terraform/eval_variable.go index 1c16069ad..bbafbe11c 100644 --- a/internal/terraform/eval_variable.go +++ b/internal/terraform/eval_variable.go @@ -45,9 +45,11 @@ func 
prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c log.Printf("[TRACE] prepareFinalInputVariableValue: %s has no defined value", addr) if cfg.Required() { // NOTE: The CLI layer typically checks for itself whether all of - // the required _root_ module variables are not set, which would - // mask this error. We can get here for child module variables, - // though. + // the required _root_ module variables are set, which would + // mask this error with a more specific one that refers to the + // CLI features for setting such variables. We can get here for + // child module variables, though. + log.Printf("[ERROR] prepareFinalInputVariableValue: %s is required but is not set", addr) diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: `Required variable not set`, @@ -64,6 +66,7 @@ func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c val, err := convert.Convert(given, convertTy) if err != nil { + log.Printf("[ERROR] prepareFinalInputVariableValue: %s has unsuitable type\n got: %s\n want: %s", addr, given.Type(), convertTy) diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: "Invalid value for module argument", @@ -93,6 +96,7 @@ func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c if defaultVal != cty.NilVal { val = defaultVal } else { + log.Printf("[ERROR] prepareFinalInputVariableValue: %s is non-nullable but set to null, and is required", addr) diags = diags.Append(&hcl.Diagnostic{ Severity: hcl.DiagError, Summary: `Required variable not set`, diff --git a/internal/terraform/variables.go b/internal/terraform/variables.go index 7a6ace0ee..f5f03d156 100644 --- a/internal/terraform/variables.go +++ b/internal/terraform/variables.go @@ -3,18 +3,50 @@ package terraform import ( "fmt" - "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" - "github.com/zclconf/go-cty/cty/convert" "github.com/hashicorp/terraform/internal/configs" 
"github.com/hashicorp/terraform/internal/tfdiags" ) -// InputValue represents a value for a variable in the root module, provided -// as part of the definition of an operation. +// InputValue represents a raw value vor a root module input variable as +// provided by the external caller into a function like terraform.Context.Plan. +// +// InputValue should represent as directly as possible what the user set the +// variable to, without any attempt to convert the value to the variable's +// type constraint or substitute the configured default values for variables +// that wasn't set. Those adjustments will be handled by Terraform Core itself +// as part of performing the requested operation. +// +// A Terraform Core caller must provide an InputValue object for each of the +// variables declared in the root module, even if the end user didn't provide +// an explicit value for some of them. See the Value field documentation for +// how to handle that situation. type InputValue struct { - Value cty.Value + // Value is the raw value as provided by the user as part of the plan + // options, or a corresponding similar data structure for non-plan + // operations. + // + // If a particular variable declared in the root module is _not_ set by + // the user then the caller must still provide an InputValue for it but + // must set Value to cty.NilVal to represent the absense of a value. + // This requirement is to help detect situations where the caller isn't + // correctly detecting and handling all of the declared variables. + // + // For historical reasons it's important that callers distinguish the + // situation of the value not being set at all (cty.NilVal) from the + // situation of it being explicitly set to null (a cty.NullVal result): + // for "nullable" input variables that distinction unfortunately decides + // whether the final value will be the variable's default or will be + // explicitly null. 
+ Value cty.Value + + // SourceType is a high-level category for where the value of Value + // came from, which Terraform Core uses to tailor some of its error + // messages to be more helpful to the user. + // + // Some SourceType values should be accompanied by a populated SourceRange + // value. See that field's documentation below for more information. SourceType ValueSourceType // SourceRange provides source location information for values whose @@ -129,23 +161,6 @@ func (vv InputValues) JustValues() map[string]cty.Value { return ret } -// DefaultVariableValues returns an InputValues map representing the default -// values specified for variables in the given configuration map. -func DefaultVariableValues(configs map[string]*configs.Variable) InputValues { - ret := make(InputValues) - for k, c := range configs { - if c.Default == cty.NilVal { - continue - } - ret[k] = &InputValue{ - Value: c.Default, - SourceType: ValueFromConfig, - SourceRange: tfdiags.SourceRangeFromHCL(c.DeclRange), - } - } - return ret -} - // SameValues returns true if the given InputValues has the same values as // the receiever, disregarding the source types and source ranges. // @@ -227,21 +242,15 @@ func (vv InputValues) Identical(other InputValues) bool { return true } -func mergeDefaultInputVariableValues(setVals InputValues, rootVarsConfig map[string]*configs.Variable) InputValues { - var variables InputValues - - // Default variables from the configuration seed our map. - variables = DefaultVariableValues(rootVarsConfig) - - // Variables provided by the caller (from CLI, environment, etc) can - // override the defaults. - variables = variables.Override(setVals) - - return variables -} - -// checkInputVariables ensures that variable values supplied at the UI conform -// to their corresponding declarations in configuration. +// checkInputVariables ensures that the caller provided an InputValue +// definition for each root module variable declared in the configuration. 
+// The caller must provide an InputVariables with keys exactly matching +// the declared variables, though some of them may be marked explicitly +// unset by their values being cty.NilVal. +// +// This doesn't perform any type checking, default value substitution, or +// validation checks. Those are all handled during a graph walk when we +// visit the graph nodes representing each root variable. // // The set of values is considered valid only if the returned diagnostics // does not contain errors. A valid set of values may still produce warnings, @@ -249,11 +258,12 @@ func mergeDefaultInputVariableValues(setVals InputValues, rootVarsConfig map[str func checkInputVariables(vcs map[string]*configs.Variable, vs InputValues) tfdiags.Diagnostics { var diags tfdiags.Diagnostics - for name, vc := range vcs { - val, isSet := vs[name] + for name := range vcs { + _, isSet := vs[name] if !isSet { - // Always an error, since the caller should already have included - // default values from the configuration in the values map. + // Always an error, since the caller should have produced an + // item with Value: cty.NilVal to be explicit that it offered + // an opportunity to set this variable. diags = diags.Append(tfdiags.Sourceless( tfdiags.Error, "Unassigned variable", @@ -261,49 +271,6 @@ func checkInputVariables(vcs map[string]*configs.Variable, vs InputValues) tfdia )) continue } - - // A given value is valid if it can convert to the desired type. - _, err := convert.Convert(val.Value, vc.ConstraintType) - if err != nil { - switch val.SourceType { - case ValueFromConfig, ValueFromAutoFile, ValueFromNamedFile: - // We have source location information for these. 
- diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Invalid value for input variable", - Detail: fmt.Sprintf("The given value is not valid for variable %q: %s.", name, err), - Subject: val.SourceRange.ToHCL().Ptr(), - }) - case ValueFromEnvVar: - diags = diags.Append(tfdiags.Sourceless( - tfdiags.Error, - "Invalid value for input variable", - fmt.Sprintf("The environment variable TF_VAR_%s does not contain a valid value for variable %q: %s.", name, name, err), - )) - case ValueFromCLIArg: - diags = diags.Append(tfdiags.Sourceless( - tfdiags.Error, - "Invalid value for input variable", - fmt.Sprintf("The argument -var=\"%s=...\" does not contain a valid value for variable %q: %s.", name, name, err), - )) - case ValueFromInput: - diags = diags.Append(tfdiags.Sourceless( - tfdiags.Error, - "Invalid value for input variable", - fmt.Sprintf("The value entered for variable %q is not valid: %s.", name, err), - )) - default: - // The above gets us good coverage for the situations users - // are likely to encounter with their own inputs. The other - // cases are generally implementation bugs, so we'll just - // use a generic error for these. - diags = diags.Append(tfdiags.Sourceless( - tfdiags.Error, - "Invalid value for input variable", - fmt.Sprintf("The value provided for variable %q is not valid: %s.", name, err), - )) - } - } } // Check for any variables that are assigned without being configured. 
diff --git a/internal/terraform/variables_test.go b/internal/terraform/variables_test.go index 41decbae2..6e53a9575 100644 --- a/internal/terraform/variables_test.go +++ b/internal/terraform/variables_test.go @@ -3,166 +3,10 @@ package terraform import ( "testing" - "github.com/davecgh/go-spew/spew" - "github.com/hashicorp/terraform/internal/tfdiags" - - "github.com/go-test/deep" + "github.com/hashicorp/terraform/internal/configs" "github.com/zclconf/go-cty/cty" ) -func TestVariables(t *testing.T) { - tests := map[string]struct { - Module string - Override map[string]cty.Value - Want InputValues - }{ - "config only": { - "vars-basic", - nil, - InputValues{ - "a": &InputValue{ - Value: cty.StringVal("foo"), - SourceType: ValueFromConfig, - SourceRange: tfdiags.SourceRange{ - Filename: "testdata/vars-basic/main.tf", - Start: tfdiags.SourcePos{Line: 1, Column: 1, Byte: 0}, - End: tfdiags.SourcePos{Line: 1, Column: 13, Byte: 12}, - }, - }, - "b": &InputValue{ - Value: cty.ListValEmpty(cty.String), - SourceType: ValueFromConfig, - SourceRange: tfdiags.SourceRange{ - Filename: "testdata/vars-basic/main.tf", - Start: tfdiags.SourcePos{Line: 6, Column: 1, Byte: 55}, - End: tfdiags.SourcePos{Line: 6, Column: 13, Byte: 67}, - }, - }, - "c": &InputValue{ - Value: cty.MapValEmpty(cty.String), - SourceType: ValueFromConfig, - SourceRange: tfdiags.SourceRange{ - Filename: "testdata/vars-basic/main.tf", - Start: tfdiags.SourcePos{Line: 11, Column: 1, Byte: 113}, - End: tfdiags.SourcePos{Line: 11, Column: 13, Byte: 125}, - }, - }, - }, - }, - - "override": { - "vars-basic", - map[string]cty.Value{ - "a": cty.StringVal("bar"), - "b": cty.ListVal([]cty.Value{ - cty.StringVal("foo"), - cty.StringVal("bar"), - }), - "c": cty.MapVal(map[string]cty.Value{ - "foo": cty.StringVal("bar"), - }), - }, - InputValues{ - "a": &InputValue{ - Value: cty.StringVal("bar"), - SourceType: ValueFromCaller, - }, - "b": &InputValue{ - Value: cty.ListVal([]cty.Value{ - cty.StringVal("foo"), - 
cty.StringVal("bar"), - }), - SourceType: ValueFromCaller, - }, - "c": &InputValue{ - Value: cty.MapVal(map[string]cty.Value{ - "foo": cty.StringVal("bar"), - }), - SourceType: ValueFromCaller, - }, - }, - }, - - "bools: config only": { - "vars-basic-bool", - nil, - InputValues{ - "a": &InputValue{ - Value: cty.True, - SourceType: ValueFromConfig, - SourceRange: tfdiags.SourceRange{ - Filename: "testdata/vars-basic-bool/main.tf", - Start: tfdiags.SourcePos{Line: 4, Column: 1, Byte: 177}, - End: tfdiags.SourcePos{Line: 4, Column: 13, Byte: 189}, - }, - }, - "b": &InputValue{ - Value: cty.False, - SourceType: ValueFromConfig, - SourceRange: tfdiags.SourceRange{ - Filename: "testdata/vars-basic-bool/main.tf", - Start: tfdiags.SourcePos{Line: 8, Column: 1, Byte: 214}, - End: tfdiags.SourcePos{Line: 8, Column: 13, Byte: 226}, - }, - }, - }, - }, - - "bools: override with string": { - "vars-basic-bool", - map[string]cty.Value{ - "a": cty.StringVal("foo"), - "b": cty.StringVal("bar"), - }, - InputValues{ - "a": &InputValue{ - Value: cty.StringVal("foo"), - SourceType: ValueFromCaller, - }, - "b": &InputValue{ - Value: cty.StringVal("bar"), - SourceType: ValueFromCaller, - }, - }, - }, - - "bools: override with bool": { - "vars-basic-bool", - map[string]cty.Value{ - "a": cty.False, - "b": cty.True, - }, - InputValues{ - "a": &InputValue{ - Value: cty.False, - SourceType: ValueFromCaller, - }, - "b": &InputValue{ - Value: cty.True, - SourceType: ValueFromCaller, - }, - }, - }, - } - - for name, test := range tests { - // Wrapped in a func so we can get defers to work - t.Run(name, func(t *testing.T) { - m := testModule(t, test.Module) - fromConfig := DefaultVariableValues(m.Module.Variables) - overrides := InputValuesFromCaller(test.Override) - got := fromConfig.Override(overrides) - - if !got.Identical(test.Want) { - t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(test.Want)) - } - for _, problem := range deep.Equal(got, test.Want) { - 
t.Errorf(problem) - } - }) - } -} - func TestCheckInputVariables(t *testing.T) { c := testModule(t, "input-variables") @@ -280,3 +124,25 @@ func TestCheckInputVariables(t *testing.T) { } }) } + +// testInputValuesUnset is a helper for constructing InputValues values for +// situations where all of the root module variables are optional and a +// test case intends to just use those default values and not override them +// at all. +// +// In other words, this constructs an InputValues with one entry per given +// input variable declaration where all of them are declared as unset. +func testInputValuesUnset(decls map[string]*configs.Variable) InputValues { + if len(decls) == 0 { + return nil + } + + ret := make(InputValues, len(decls)) + for name := range decls { + ret[name] = &InputValue{ + Value: cty.NilVal, + SourceType: ValueFromUnknown, + } + } + return ret +} From 9ebc3e1cd2798586505096ee3b7860393fb8af95 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Tue, 21 Dec 2021 18:04:24 -0800 Subject: [PATCH 41/68] core: More accurate error message for invalid variable values In earlier Terraform versions we had an extra validation step prior to the graph walk which tried to partially validate root module input variable values (just checking their type constraints) and then return error messages which specified as accurately as possible where the value had originally come from. We're now handling that sort of validation exclusively during the graph walk so that we can share the main logic between both root module and child module variable values, but previously that shared code wasn't able to generate such specific information about where the values had originated, because it was adapted from code originally written to only deal with child module variables. Here then we restore a similar level of detail as before, when we're processing root module variables. 
For child module variables, we use synthetic InputValue objects which state that the value was declared in the configuration, thus causing us to produce a similar sort of error message as we would've before which includes a source range covering the argument expression in the calling module block. --- internal/terraform/eval_variable.go | 84 +++++++++--- internal/terraform/eval_variable_test.go | 143 ++++++++++++++++++++- internal/terraform/node_module_variable.go | 11 +- internal/terraform/node_root_variable.go | 17 ++- internal/terraform/variables.go | 30 ++++- 5 files changed, 254 insertions(+), 31 deletions(-) diff --git a/internal/terraform/eval_variable.go b/internal/terraform/eval_variable.go index bbafbe11c..fd57a136f 100644 --- a/internal/terraform/eval_variable.go +++ b/internal/terraform/eval_variable.go @@ -12,7 +12,7 @@ import ( "github.com/zclconf/go-cty/cty/convert" ) -func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given cty.Value, valRange tfdiags.SourceRange, cfg *configs.Variable) (cty.Value, tfdiags.Diagnostics) { +func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, raw *InputValue, cfg *configs.Variable) (cty.Value, tfdiags.Diagnostics) { var diags tfdiags.Diagnostics convertTy := cfg.ConstraintType @@ -41,6 +41,29 @@ func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c } } + var sourceRange tfdiags.SourceRange + var nonFileSource string + if raw.HasSourceRange() { + sourceRange = raw.SourceRange + } else { + // If the value came from a place that isn't a file and thus doesn't + // have its own source range, we'll use the declaration range as + // our source range and generate some slightly different error + // messages. 
+ sourceRange = tfdiags.SourceRangeFromHCL(cfg.DeclRange) + switch raw.SourceType { + case ValueFromCLIArg: + nonFileSource = fmt.Sprintf("set using -var=\"%s=...\"", addr.Variable.Name) + case ValueFromEnvVar: + nonFileSource = fmt.Sprintf("set using the TF_VAR_%s environment variable", addr.Variable.Name) + case ValueFromInput: + nonFileSource = "set using an interactive prompt" + default: + nonFileSource = "set from outside of the configuration" + } + } + + given := raw.Value if given == cty.NilVal { // The variable wasn't set at all (even to null) log.Printf("[TRACE] prepareFinalInputVariableValue: %s has no defined value", addr) if cfg.Required() { @@ -54,7 +77,7 @@ func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c Severity: hcl.DiagError, Summary: `Required variable not set`, Detail: fmt.Sprintf(`The variable %q is required, but is not set.`, addr.Variable.Name), - Subject: valRange.ToHCL().Ptr(), + Subject: cfg.DeclRange.Ptr(), }) // We'll return a placeholder unknown value to avoid producing // redundant downstream errors. 
@@ -67,15 +90,27 @@ func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c val, err := convert.Convert(given, convertTy) if err != nil { log.Printf("[ERROR] prepareFinalInputVariableValue: %s has unsuitable type\n got: %s\n want: %s", addr, given.Type(), convertTy) - diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Invalid value for module argument", - Detail: fmt.Sprintf( - "The given value is not suitable for child module variable %q defined at %s: %s.", - cfg.Name, cfg.DeclRange.String(), err, - ), - Subject: valRange.ToHCL().Ptr(), - }) + if nonFileSource != "" { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid value for input variable", + Detail: fmt.Sprintf( + "Unsuitable value for %s %s: %s.", + addr, nonFileSource, err, + ), + Subject: cfg.DeclRange.Ptr(), + }) + } else { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid value for input variable", + Detail: fmt.Sprintf( + "The given value is not suitable for %s declared at %s: %s.", + addr, cfg.DeclRange.String(), err, + ), + Subject: sourceRange.ToHCL().Ptr(), + }) + } // We'll return a placeholder unknown value to avoid producing // redundant downstream errors. 
return cty.UnknownVal(cfg.Type), diags @@ -97,12 +132,27 @@ func prepareFinalInputVariableValue(addr addrs.AbsInputVariableInstance, given c val = defaultVal } else { log.Printf("[ERROR] prepareFinalInputVariableValue: %s is non-nullable but set to null, and is required", addr) - diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: `Required variable not set`, - Detail: fmt.Sprintf(`The variable %q is required, but the given value is null.`, addr.Variable.Name), - Subject: valRange.ToHCL().Ptr(), - }) + if nonFileSource != "" { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: `Required variable not set`, + Detail: fmt.Sprintf( + "Unsuitable value for %s %s: required variable may not be set to null.", + addr, nonFileSource, + ), + Subject: cfg.DeclRange.Ptr(), + }) + } else { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: `Required variable not set`, + Detail: fmt.Sprintf( + "The given value is not suitable for %s defined at %s: required variable may not be set to null.", + addr, cfg.DeclRange.String(), + ), + Subject: sourceRange.ToHCL().Ptr(), + }) + } // Stub out our return value so that the semantic checker doesn't // produce redundant downstream errors. val = cty.UnknownVal(cfg.Type) diff --git a/internal/terraform/eval_variable_test.go b/internal/terraform/eval_variable_test.go index fa4048fed..0ebea982f 100644 --- a/internal/terraform/eval_variable_test.go +++ b/internal/terraform/eval_variable_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/hashicorp/hcl/v2" "github.com/zclconf/go-cty/cty" "github.com/hashicorp/terraform/internal/addrs" @@ -65,6 +66,13 @@ func TestPrepareFinalInputVariableValue(t *testing.T) { }) variableConfigs := cfg.Module.Variables + // Because we loaded our pseudo-module from a temporary file, the + // declaration source ranges will have unpredictable filenames. We'll + // fix that here just to make things easier below. 
+ for _, vc := range variableConfigs { + vc.DeclRange.Filename = "main.tf" + } + tests := []struct { varName string given cty.Value @@ -264,7 +272,7 @@ func TestPrepareFinalInputVariableValue(t *testing.T) { "required", cty.NullVal(cty.DynamicPseudoType), cty.UnknownVal(cty.DynamicPseudoType), - `Required variable not set: The variable "required" is required, but the given value is null.`, + `Required variable not set: Unsuitable value for var.required set from outside of the configuration: required variable may not be set to null.`, }, { "required", @@ -316,7 +324,7 @@ func TestPrepareFinalInputVariableValue(t *testing.T) { "constrained_string_required", cty.NullVal(cty.DynamicPseudoType), cty.UnknownVal(cty.String), - `Required variable not set: The variable "constrained_string_required" is required, but the given value is null.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set from outside of the configuration: required variable may not be set to null.`, }, { "constrained_string_required", @@ -401,8 +409,13 @@ func TestPrepareFinalInputVariableValue(t *testing.T) { test.given, ) + rawVal := &InputValue{ + Value: test.given, + SourceType: ValueFromCaller, + } + got, diags := prepareFinalInputVariableValue( - varAddr, test.given, tfdiags.SourceRangeFromHCL(varCfg.DeclRange), varCfg, + varAddr, rawVal, varCfg, ) if test.wantErr != "" { @@ -423,4 +436,128 @@ func TestPrepareFinalInputVariableValue(t *testing.T) { } }) } + + t.Run("SourceType error message variants", func(t *testing.T) { + tests := []struct { + SourceType ValueSourceType + SourceRange tfdiags.SourceRange + WantTypeErr string + WantNullErr string + }{ + { + ValueFromUnknown, + tfdiags.SourceRange{}, + `Invalid value for input variable: Unsuitable value for var.constrained_string_required set from outside of the configuration: string required.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set from outside of the 
configuration: required variable may not be set to null.`, + }, + { + ValueFromConfig, + tfdiags.SourceRange{ + Filename: "example.tf", + Start: tfdiags.SourcePos(hcl.InitialPos), + End: tfdiags.SourcePos(hcl.InitialPos), + }, + `Invalid value for input variable: The given value is not suitable for var.constrained_string_required declared at main.tf:32,3-41: string required.`, + `Required variable not set: The given value is not suitable for var.constrained_string_required defined at main.tf:32,3-41: required variable may not be set to null.`, + }, + { + ValueFromAutoFile, + tfdiags.SourceRange{ + Filename: "example.auto.tfvars", + Start: tfdiags.SourcePos(hcl.InitialPos), + End: tfdiags.SourcePos(hcl.InitialPos), + }, + `Invalid value for input variable: The given value is not suitable for var.constrained_string_required declared at main.tf:32,3-41: string required.`, + `Required variable not set: The given value is not suitable for var.constrained_string_required defined at main.tf:32,3-41: required variable may not be set to null.`, + }, + { + ValueFromNamedFile, + tfdiags.SourceRange{ + Filename: "example.tfvars", + Start: tfdiags.SourcePos(hcl.InitialPos), + End: tfdiags.SourcePos(hcl.InitialPos), + }, + `Invalid value for input variable: The given value is not suitable for var.constrained_string_required declared at main.tf:32,3-41: string required.`, + `Required variable not set: The given value is not suitable for var.constrained_string_required defined at main.tf:32,3-41: required variable may not be set to null.`, + }, + { + ValueFromCLIArg, + tfdiags.SourceRange{}, + `Invalid value for input variable: Unsuitable value for var.constrained_string_required set using -var="constrained_string_required=...": string required.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set using -var="constrained_string_required=...": required variable may not be set to null.`, + }, + { + ValueFromEnvVar, + tfdiags.SourceRange{}, + 
`Invalid value for input variable: Unsuitable value for var.constrained_string_required set using the TF_VAR_constrained_string_required environment variable: string required.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set using the TF_VAR_constrained_string_required environment variable: required variable may not be set to null.`, + }, + { + ValueFromInput, + tfdiags.SourceRange{}, + `Invalid value for input variable: Unsuitable value for var.constrained_string_required set using an interactive prompt: string required.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set using an interactive prompt: required variable may not be set to null.`, + }, + { + // NOTE: This isn't actually a realistic case for this particular + // function, because if we have a value coming from a plan then + // we must be in the apply step, and we shouldn't be able to + // get past the plan step if we have invalid variable values, + // and during planning we'll always have other source types. 
+ ValueFromPlan, + tfdiags.SourceRange{}, + `Invalid value for input variable: Unsuitable value for var.constrained_string_required set from outside of the configuration: string required.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set from outside of the configuration: required variable may not be set to null.`, + }, + { + ValueFromCaller, + tfdiags.SourceRange{}, + `Invalid value for input variable: Unsuitable value for var.constrained_string_required set from outside of the configuration: string required.`, + `Required variable not set: Unsuitable value for var.constrained_string_required set from outside of the configuration: required variable may not be set to null.`, + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("%s %s", test.SourceType, test.SourceRange.StartString()), func(t *testing.T) { + varAddr := addrs.InputVariable{Name: "constrained_string_required"}.Absolute(addrs.RootModuleInstance) + varCfg := variableConfigs[varAddr.Variable.Name] + t.Run("type error", func(t *testing.T) { + rawVal := &InputValue{ + Value: cty.EmptyObjectVal, + SourceType: test.SourceType, + SourceRange: test.SourceRange, + } + + _, diags := prepareFinalInputVariableValue( + varAddr, rawVal, varCfg, + ) + if !diags.HasErrors() { + t.Fatalf("unexpected success; want error") + } + + if got, want := diags.Err().Error(), test.WantTypeErr; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } + }) + t.Run("null error", func(t *testing.T) { + rawVal := &InputValue{ + Value: cty.NullVal(cty.DynamicPseudoType), + SourceType: test.SourceType, + SourceRange: test.SourceRange, + } + + _, diags := prepareFinalInputVariableValue( + varAddr, rawVal, varCfg, + ) + if !diags.HasErrors() { + t.Fatalf("unexpected success; want error") + } + + if got, want := diags.Err().Error(), test.WantNullErr; got != want { + t.Errorf("wrong error\ngot: %s\nwant: %s", got, want) + } + }) + }) + } + }) } diff --git 
a/internal/terraform/node_module_variable.go b/internal/terraform/node_module_variable.go index 321cd8748..ae3450be5 100644 --- a/internal/terraform/node_module_variable.go +++ b/internal/terraform/node_module_variable.go @@ -227,7 +227,16 @@ func (n *nodeModuleVariable) evalModuleCallArgument(ctx EvalContext, validateOnl errSourceRange = tfdiags.SourceRangeFromHCL(n.Config.DeclRange) // we use the declaration range as a fallback for an undefined variable } - finalVal, moreDiags := prepareFinalInputVariableValue(n.Addr, givenVal, errSourceRange, n.Config) + // We construct a synthetic InputValue here to pretend as if this were + // a root module variable set from outside, just as a convenience so we + // can reuse the InputValue type for this. + rawVal := &InputValue{ + Value: givenVal, + SourceType: ValueFromConfig, + SourceRange: errSourceRange, + } + + finalVal, moreDiags := prepareFinalInputVariableValue(n.Addr, rawVal, n.Config) diags = diags.Append(moreDiags) return finalVal, diags.ErrWithWarnings() diff --git a/internal/terraform/node_root_variable.go b/internal/terraform/node_root_variable.go index d023be350..33f439d7c 100644 --- a/internal/terraform/node_root_variable.go +++ b/internal/terraform/node_root_variable.go @@ -65,20 +65,23 @@ func (n *NodeRootVariable) Execute(ctx EvalContext, op walkOperation) tfdiags.Di return nil } - var givenVal cty.Value - if n.RawValue != nil { - givenVal = n.RawValue.Value - } else { + givenVal := n.RawValue + if givenVal == nil { // We'll use cty.NilVal to represent the variable not being set at // all, which for historical reasons is unfortunately different than - // explicitly setting it to null in some cases. - givenVal = cty.NilVal + // explicitly setting it to null in some cases. In normal code we + // should never get here because all variables should have raw + // values, but we can get here in some historical tests that call + // in directly and don't necessarily obey the rules. 
+ givenVal = &InputValue{ + Value: cty.NilVal, + SourceType: ValueFromUnknown, + } } finalVal, moreDiags := prepareFinalInputVariableValue( addr, givenVal, - tfdiags.SourceRangeFromHCL(n.Config.DeclRange), n.Config, ) diags = diags.Append(moreDiags) diff --git a/internal/terraform/variables.go b/internal/terraform/variables.go index f5f03d156..a60f18700 100644 --- a/internal/terraform/variables.go +++ b/internal/terraform/variables.go @@ -9,7 +9,7 @@ import ( "github.com/hashicorp/terraform/internal/tfdiags" ) -// InputValue represents a raw value vor a root module input variable as +// InputValue represents a raw value for a root module input variable as // provided by the external caller into a function like terraform.Context.Plan. // // InputValue should represent as directly as possible what the user set the @@ -22,6 +22,11 @@ import ( // variables declared in the root module, even if the end user didn't provide // an explicit value for some of them. See the Value field documentation for // how to handle that situation. +// +// Terraform Core also internally uses InputValue to represent the raw value +// provided for a variable in a child module call, following the same +// conventions. However, that's an implementation detail not visible to +// outside callers. type InputValue struct { // Value is the raw value as provided by the user as part of the plan // options, or a corresponding similar data structure for non-plan @@ -50,8 +55,9 @@ type InputValue struct { SourceType ValueSourceType // SourceRange provides source location information for values whose - // SourceType is either ValueFromConfig or ValueFromFile. It is not - // populated for other source types, and so should not be used. + // SourceType is either ValueFromConfig, ValueFromNamedFile, or + // ValueFromAutoFile. It is not populated for other source types, and so + // should not be used.
SourceRange tfdiags.SourceRange } @@ -106,6 +112,24 @@ func (v *InputValue) GoString() string { } } +// HasSourceRange returns true if the receiver has a source type for which +// we expect the SourceRange field to be populated with a valid range. +func (v *InputValue) HasSourceRange() bool { + return v.SourceType.HasSourceRange() +} + +// HasSourceRange returns true if the receiver is one of the source types +// that is used along with a valid SourceRange field when appearing inside an +// InputValue object. +func (v ValueSourceType) HasSourceRange() bool { + switch v { + case ValueFromConfig, ValueFromAutoFile, ValueFromNamedFile: + return true + default: + return false + } +} + func (v ValueSourceType) GoString() string { return fmt.Sprintf("terraform.%s", v) } From dabd7567af2940b424b6521ed3564a0fbef1f7e9 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 10 Jan 2022 12:29:10 -0800 Subject: [PATCH 42/68] Update CHANGELOG.md --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59307b611..86538555d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,10 @@ ENHANCEMENTS: * The "Invalid for_each argument" error message for unknown maps/sets now includes an additional paragraph to try to help the user notice they can move apply-time values into the map _values_ instead of the map _keys_, and thus avoid the problem without resorting to `-target`. [GH-30327] +BUG FIXES: + +* Terraform now handles type constraints, nullability, and custom variable validation properly for root module variables. Previously there was an order of operations problem where the nullability and custom variable validation were checked too early, prior to dealing with the type constraints, and thus that logic could potentially "see" an incorrectly-typed value in spite of the type constraint, leading to incorrect errors.
[GH-29959] + ## Previous Releases For information on prior major and minor releases, see their changelogs: From 684ed7505d7aaa85d3eb4460d0ae7aec17b99aa8 Mon Sep 17 00:00:00 2001 From: James Bardin Date: Mon, 10 Jan 2022 16:14:30 -0500 Subject: [PATCH 43/68] remove synthetic default expression for variables Now that variable evaluation checks for a nil expression the graph transformer does not need to generate a synthetic expression for variable defaults. This means that all default handling is now located in one place, and we are not surprised by a configuration expression showing up which doesn't actually exist in the configuration. Rename nodeModuleVariable.evalModuleCallArgument to evalModuleVariable. This method is no longer handling only the module call argument, it is also dealing with the variable declaration defaults and validation statements. Add an additional tests for validation with a non-nullable variable. --- internal/terraform/context_validate_test.go | 33 +++++++++++++++++++ internal/terraform/node_module_variable.go | 8 ++--- .../terraform/transform_module_variable.go | 8 ----- 3 files changed, 37 insertions(+), 12 deletions(-) diff --git a/internal/terraform/context_validate_test.go b/internal/terraform/context_validate_test.go index a02d85cdd..1f0491f1a 100644 --- a/internal/terraform/context_validate_test.go +++ b/internal/terraform/context_validate_test.go @@ -2062,3 +2062,36 @@ output "out" { t.Fatal(diags.ErrWithWarnings()) } } + +func TestContext2Validate_nonNullableVariableDefaultValidation(t *testing.T) { + m := testModuleInline(t, map[string]string{ + "main.tf": ` + module "first" { + source = "./mod" + input = null + } + `, + + "mod/main.tf": ` + variable "input" { + type = string + default = "default" + nullable = false + + // Validation expressions should receive the default with nullable=false and + // a null input. + validation { + condition = var.input != null + error_message = "Input cannot be null!" 
+ } + } + `, + }) + + ctx := testContext2(t, &ContextOpts{}) + + diags := ctx.Validate(m) + if diags.HasErrors() { + t.Fatal(diags.ErrWithWarnings()) + } +} diff --git a/internal/terraform/node_module_variable.go b/internal/terraform/node_module_variable.go index ae3450be5..c5e2294ea 100644 --- a/internal/terraform/node_module_variable.go +++ b/internal/terraform/node_module_variable.go @@ -149,10 +149,10 @@ func (n *nodeModuleVariable) Execute(ctx EvalContext, op walkOperation) (diags t switch op { case walkValidate: - val, err = n.evalModuleCallArgument(ctx, true) + val, err = n.evalModuleVariable(ctx, true) diags = diags.Append(err) default: - val, err = n.evalModuleCallArgument(ctx, false) + val, err = n.evalModuleVariable(ctx, false) diags = diags.Append(err) } if diags.HasErrors() { @@ -178,7 +178,7 @@ func (n *nodeModuleVariable) DotNode(name string, opts *dag.DotOpts) *dag.DotNod } } -// evalModuleCallArgument produces the value for a particular variable as will +// evalModuleVariable produces the value for a particular variable as will // be used by a child module instance. // // The result is written into a map, with its key set to the local name of the @@ -190,7 +190,7 @@ func (n *nodeModuleVariable) DotNode(name string, opts *dag.DotOpts) *dag.DotNod // validateOnly indicates that this evaluation is only for config // validation, and we will not have any expansion module instance // repetition data. 
-func (n *nodeModuleVariable) evalModuleCallArgument(ctx EvalContext, validateOnly bool) (cty.Value, error) { +func (n *nodeModuleVariable) evalModuleVariable(ctx EvalContext, validateOnly bool) (cty.Value, error) { var diags tfdiags.Diagnostics var givenVal cty.Value var errSourceRange tfdiags.SourceRange diff --git a/internal/terraform/transform_module_variable.go b/internal/terraform/transform_module_variable.go index b739bfd6e..a9fa02c4e 100644 --- a/internal/terraform/transform_module_variable.go +++ b/internal/terraform/transform_module_variable.go @@ -3,7 +3,6 @@ package terraform import ( "fmt" - "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/terraform/internal/addrs" "github.com/hashicorp/terraform/internal/tfdiags" "github.com/zclconf/go-cty/cty" @@ -94,13 +93,6 @@ func (t *ModuleVariableTransformer) transformSingle(g *Graph, parent, c *configs var expr hcl.Expression if attr := content.Attributes[v.Name]; attr != nil { expr = attr.Expr - } else { - // No expression provided for this variable, so we'll make a - // synthetic one using the variable's default value. 
- expr = &hclsyntax.LiteralValueExpr{ - Val: v.Default, - SrcRange: v.DeclRange, // This is not exact, but close enough - } } // Add a plannable node, as the variable may expand From 1b7e7b967dca992d04a093b3ae1c6b03f6cc267f Mon Sep 17 00:00:00 2001 From: Laura Pacilio <83350965+laurapacilio@users.noreply.github.com> Date: Mon, 10 Jan 2022 17:49:49 -0500 Subject: [PATCH 44/68] Fix broken links to external docs --- .../resources/provisioners/salt-masterless.mdx | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/website/docs/language/resources/provisioners/salt-masterless.mdx b/website/docs/language/resources/provisioners/salt-masterless.mdx index d24532177..70fd17dbe 100644 --- a/website/docs/language/resources/provisioners/salt-masterless.mdx +++ b/website/docs/language/resources/provisioners/salt-masterless.mdx @@ -47,28 +47,26 @@ Optional: Docker builder, you will likely want to pass `true` since `sudo` is often not pre-installed. - `remote_pillar_roots` (string) - The path to your remote [pillar - roots](http://docs.saltstack.com/ref/configuration/master.html#pillar-configuration). + roots](https://docs.saltproject.io/en/latest/ref/configuration/master.html#pillar-configuration). default: `/srv/pillar`. This option cannot be used with `minion_config`. - `remote_state_tree` (string) - The path to your remote [state - tree](http://docs.saltstack.com/ref/states/highstate.html#the-salt-state-tree). + tree](https://docs.saltproject.io/en/latest/ref/states/highstate.html#the-salt-state-tree). default: `/srv/salt`. This option cannot be used with `minion_config`. - `local_pillar_roots` (string) - The path to your local [pillar - roots](http://docs.saltstack.com/ref/configuration/master.html#pillar-configuration). + roots](https://docs.saltproject.io/en/latest/ref/configuration/master.html#pillar-configuration). This will be uploaded to the `remote_pillar_roots` on the remote. 
- `local_state_tree` (string) - The path to your local [state - tree](http://docs.saltstack.com/ref/states/highstate.html#the-salt-state-tree). + tree](https://docs.saltproject.io/en/latest/ref/states/highstate.html#the-salt-state-tree). This will be uploaded to the `remote_state_tree` on the remote. - `custom_state` (string) - A state to be run instead of `state.highstate`. Defaults to `state.highstate` if unspecified. - `minion_config_file` (string) - The path to your local [minion config - file](http://docs.saltstack.com/ref/configuration/minion.html). This will be - uploaded to the `/etc/salt` on the remote. This option overrides the - `remote_state_tree` or `remote_pillar_roots` options. + file](https://docs.saltproject.io/en/latest/ref/configuration/minion.html). This will be uploaded to the `/etc/salt` on the remote. This option overrides the `remote_state_tree` or `remote_pillar_roots` options. - `skip_bootstrap` (boolean) - By default the salt provisioner runs [salt bootstrap](https://github.com/saltstack/salt-bootstrap) to install salt. Set @@ -83,7 +81,7 @@ Optional: - `log_level` (string) - Set the logging level for the `salt-call` run. - `salt_call_args` (string) - Additional arguments to pass directly to `salt-call`. See - [salt-call](https://docs.saltstack.com/ref/cli/salt-call.html) documentation for more + [salt-call](https://docs.saltproject.io/en/latest/ref/cli/salt-call.html) documentation for more information. By default no additional arguments (besides the ones Terraform generates) are passed to `salt-call`. From beb9432155b7f78122c0b57e66b6ee275ca5e4b8 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 10 Jan 2022 16:44:18 -0800 Subject: [PATCH 45/68] website: CLI state manipulation section de-emphasize old features Previously the "terraform state ..." subcommands were the only way to perform various manipulations of the state, but in recent Terraform versions we have replaced these with better options. 
Since these pages seem to already have pretty good search engine optimization for the use-cases they are describing, we'll prioritize mentioning the new approaches and only mention the now-deprecated or de-emphasized features as a secondary approach. Specifically: - Describe the -replace=... planning option in preference to "terraform taint", and present taint as primarily a mechanism for Terraform to use itself, as opposed to something end-users should typically use directly. - Introduce the config-based refactoring features before describing "terraform state mv". The older features here are still applicable in some situations and are required for those still using older versions of Terraform, so we will retain the information about them for now while aiming to be clearer in each case about which is our preferred, modern approach. --- website/docs/cli/state/index.mdx | 2 +- website/docs/cli/state/move.mdx | 13 +++++- website/docs/cli/state/taint.mdx | 72 ++++++++++++++++++++++++-------- 3 files changed, 67 insertions(+), 20 deletions(-) diff --git a/website/docs/cli/state/index.mdx b/website/docs/cli/state/index.mdx index 9db8d9252..b3c11b7de 100644 --- a/website/docs/cli/state/index.mdx +++ b/website/docs/cli/state/index.mdx @@ -22,7 +22,7 @@ infrastructure. 
Terraform CLI supports several workflows for interacting with state: - [Inspecting State](/cli/state/inspect) -- [Forcing Re-creation (Tainting)](/cli/state/taint) +- [Forcing Re-creation](/cli/state/taint) - [Moving Resources](/cli/state/move) - Importing Pre-existing Resources (documented in the [Importing Infrastructure](/cli/import) section) diff --git a/website/docs/cli/state/move.mdx b/website/docs/cli/state/move.mdx index 8b0f9cdae..c69280a90 100644 --- a/website/docs/cli/state/move.mdx +++ b/website/docs/cli/state/move.mdx @@ -21,13 +21,22 @@ In cases where it's important to preserve an existing infrastructure object, you can explicitly tell Terraform to associate it with a different configured resource. +For most cases we recommend using +[the Terraform language's refactoring features](/language/modules/develop/refactoring) +to document in your module exactly how the resource names have changed over +time. Terraform will react to this information automatically during planning, +and thus users of your module will not need to take any unusual extra steps. + +> **Hands On:** Try the [Use Configuration to Move Resources](https://learn.hashicorp.com/tutorials/terraform/move-config) on HashiCorp Learn. + +There are some other situations which require explicit state modifications, +though. For those, consider the following Terraform commands: + - [The `terraform state mv` command](/cli/commands/state/mv) changes which resource address in your configuration is associated with a particular real-world object. Use this to preserve an object when renaming a resource, or when moving a resource into or out of a child module. - > **Hands On:** Try the [Use Configuration to Move Resources](https://learn.hashicorp.com/tutorials/terraform/move-config) on HashiCorp Learn. 
- - [The `terraform state rm` command](/cli/commands/state/rm) tells Terraform to stop managing a resource as part of the current working directory and workspace, _without_ destroying the corresponding real-world object. (You diff --git a/website/docs/cli/state/taint.mdx b/website/docs/cli/state/taint.mdx index dbe67f906..c4c46ca2f 100644 --- a/website/docs/cli/state/taint.mdx +++ b/website/docs/cli/state/taint.mdx @@ -1,25 +1,63 @@ --- -page_title: Forcing Re-creation of Resources (Tainting) - Terraform CLI +page_title: Forcing Re-creation of Resources - Terraform CLI description: Commands that allow you to destroy and re-create resources manually. --- -# Forcing Re-creation of Resources (Tainting) +# Forcing Re-creation of Resources -When a resource declaration is modified, Terraform usually attempts to update -the existing resource in place (although some changes can require destruction -and re-creation, usually due to upstream API limitations). +During planning, by default Terraform retrieves the latest state of each +existing object and compares it with the current configuration, planning +actions only against objects whose current state does not match the +configuration. -In some cases, you might want a resource to be destroyed and re-created even -when Terraform doesn't think it's necessary. This is usually for objects that -aren't fully described by their resource arguments due to side-effects that -happen during creation; for example, a virtual machine that configures itself -with `cloud-init` on startup might no longer meet your needs if the cloud-init -configuration changes. +However, in some cases a remote object may become damaged or degraded in a +way that Terraform cannot automatically detect. For example, if software +running inside a virtual machine crashes but the virtual machine itself is +still running then Terraform will typically have no way to detect and respond +to the problem, because Terraform only directly manages the machine as a whole. 
-- [The `terraform taint` command](/cli/commands/taint) tells Terraform to - destroy and re-create a particular resource during the next apply, regardless - of whether its resource arguments would normally require that. +If you know that an object is damaged, or if you want to force Terraform to +replace it for any other reason, you can override Terraform's default behavior +using [the `-replace=...` planning option](/cli/commands/plan#replace-address) +when you run either `terraform plan` or `terraform apply`: -- [The `terraform untaint` command](/cli/commands/untaint) undoes a - previous taint, or can preserve a resource that was automatically tainted due - to failed [provisioners](/language/resources/provisioners/syntax). +```shellsession +$ terraform apply -replace=aws_instance.example +# ... + + # aws_instance.example will be replaced, as requested +-/+ resource "aws_instance" "example" { + # ... + } +``` + +## The "tainted" status + +Sometimes Terraform is able to infer automatically that an object is in an +incomplete or degraded state. For example, if creation of a complex object +fails in such a way that parts of it already exist in the remote system, or +if object creation succeeded but a provisioner step subsequently failed, +Terraform must remember that the object exists but may not be fully-functional. + +Terraform represents this situation by marking an object in the state as +"tainted". When an object is marked with this status, the next plan will force +replacing that object in a similar way to if you had specified that object's +address using `-replace=...` as described above. + +``` + # aws_instance.example is tainted, so must be replaced +-/+ resource "aws_instance" "example" { + # ... 
+ } +``` + +If Terraform has marked an object as tainted but you consider it to be working +correctly and do not want to replace it, you can override Terraform's +determination using [the `terraform untaint` command](/cli/commands/untaint), +after which Terraform will consider the object to be ready for use by any +downstream resource declarations. + +You can also _force_ Terraform to mark a particular object as tainted using +[the `terraform taint` command](/cli/commands/taint), but that approach is +deprecated in favor of the `-replace=...` option, which avoids the need to +create an interim state snapshot with a tainted object. From bdc5f152d7ac2ddbdd7fbbb00422c51d2d833ef0 Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 10 Jan 2022 15:36:59 -0800 Subject: [PATCH 46/68] refactoring: Implied move statements can be cross-package Terraform uses "implied" move statements to represent the situation where it automatically handles a switch from count to no-count on a resource. Because that situation requires targeting only a specific resource instance inside a specific module instance, implied move statements are always presented as if they had been declared in the root module and then traversed through the exact module instance path to reach the target resource. However, that means they can potentially cross a module package boundary, if the changed resource belongs to an external module. Normally we prohibit that to avoid the root module depending on implementation details of the called module, but Terraform generates these implied statements based only on information in the called module and so there's no need to apply that same restriction to implied move statements, which will always have source and destination addresses belonging to the same module instance. 
This change therefore fixes a misbehavior where Terraform would reject an attempt to switch from no-count to count in a called module, where previously the author of the calling configuration had no recourse to fix it because the change has actually happened upstream. --- internal/refactoring/move_validate.go | 49 ++++++++++++--------- internal/refactoring/move_validate_test.go | 51 ++++++++++++++++++++++ 2 files changed, 79 insertions(+), 21 deletions(-) diff --git a/internal/refactoring/move_validate.go b/internal/refactoring/move_validate.go index eedf00414..13f2e50f9 100644 --- a/internal/refactoring/move_validate.go +++ b/internal/refactoring/move_validate.go @@ -55,27 +55,34 @@ func ValidateMoves(stmts []MoveStatement, rootCfg *configs.Config, declaredInsts _, toCallSteps := stmt.To.ModuleCallTraversals() modCfg := rootCfg.Descendent(stmtMod) - if pkgAddr := callsThroughModulePackage(modCfg, fromCallSteps); pkgAddr != nil { - diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Cross-package move statement", - Detail: fmt.Sprintf( - "This statement declares a move from an object declared in external module package %q. Move statements can be only within a single module package.", - pkgAddr, - ), - Subject: stmt.DeclRange.ToHCL().Ptr(), - }) - } - if pkgAddr := callsThroughModulePackage(modCfg, toCallSteps); pkgAddr != nil { - diags = diags.Append(&hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Cross-package move statement", - Detail: fmt.Sprintf( - "This statement declares a move to an object declared in external module package %q. Move statements can be only within a single module package.", - pkgAddr, - ), - Subject: stmt.DeclRange.ToHCL().Ptr(), - }) + if !stmt.Implied { + // Implied statements can cross module boundaries because we + // generate them only for changing instance keys on a single + // resource. 
They happen to be generated _as if_ they were written + // in the root module, but the source and destination are always + // in the same module anyway. + if pkgAddr := callsThroughModulePackage(modCfg, fromCallSteps); pkgAddr != nil { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Cross-package move statement", + Detail: fmt.Sprintf( + "This statement declares a move from an object declared in external module package %q. Move statements can be only within a single module package.", + pkgAddr, + ), + Subject: stmt.DeclRange.ToHCL().Ptr(), + }) + } + if pkgAddr := callsThroughModulePackage(modCfg, toCallSteps); pkgAddr != nil { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Cross-package move statement", + Detail: fmt.Sprintf( + "This statement declares a move to an object declared in external module package %q. Move statements can be only within a single module package.", + pkgAddr, + ), + Subject: stmt.DeclRange.ToHCL().Ptr(), + }) + } } for _, modInst := range declaredInsts.InstancesForModule(stmtMod) { diff --git a/internal/refactoring/move_validate_test.go b/internal/refactoring/move_validate_test.go index 60122511f..aa4ec4f3b 100644 --- a/internal/refactoring/move_validate_test.go +++ b/internal/refactoring/move_validate_test.go @@ -366,6 +366,50 @@ Each resource can have moved from only one source resource.`, }, WantError: `Cross-package move statement: This statement declares a move to an object declared in external module package "fake-external:///". 
Move statements can be only within a single module package.`, }, + "implied move from resource in another module package": { + Statements: []MoveStatement{ + makeTestImpliedMoveStmt(t, + ``, + `module.fake_external.test.thing`, + `test.thing`, + ), + }, + // Implied move statements are not subject to the cross-package restriction + WantError: ``, + }, + "implied move to resource in another module package": { + Statements: []MoveStatement{ + makeTestImpliedMoveStmt(t, + ``, + `test.thing`, + `module.fake_external.test.thing`, + ), + }, + // Implied move statements are not subject to the cross-package restriction + WantError: ``, + }, + "implied move from module call in another module package": { + Statements: []MoveStatement{ + makeTestImpliedMoveStmt(t, + ``, + `module.fake_external.module.a`, + `module.b`, + ), + }, + // Implied move statements are not subject to the cross-package restriction + WantError: ``, + }, + "implied move to module call in another module package": { + Statements: []MoveStatement{ + makeTestImpliedMoveStmt(t, + ``, + `module.a`, + `module.fake_external.module.b`, + ), + }, + // Implied move statements are not subject to the cross-package restriction + WantError: ``, + }, "move to a call that refers to another module package": { Statements: []MoveStatement{ makeTestMoveStmt(t, @@ -650,6 +694,13 @@ func makeTestMoveStmt(t *testing.T, moduleStr, fromStr, toStr string) MoveStatem } } +func makeTestImpliedMoveStmt(t *testing.T, moduleStr, fromStr, toStr string) MoveStatement { + t.Helper() + ret := makeTestMoveStmt(t, moduleStr, fromStr, toStr) + ret.Implied = true + return ret +} + var fakeExternalModuleSource = addrs.ModuleSourceRemote{ PackageAddr: addrs.ModulePackage("fake-external:///"), } From e95f29bf9d43bab3198964e5af56e6124506e56f Mon Sep 17 00:00:00 2001 From: Martin Atkins Date: Mon, 10 Jan 2022 16:06:19 -0800 Subject: [PATCH 47/68] lang/funcs: fileexists slightly better "not a file" error message Previously we were just returning a 
string representation of the file mode, which spends more characters on the irrelevant permission bits than it does on the directory entry type, and is presented in a Unix-centric format that likely won't be familiar to the user of a Windows system. Instead, we'll recognize a few specific directory entry types that seem worth mentioning by name, and then use a generic message for the rest. The original motivation here was actually to deal with the fact that our tests for this function were previously not portable due to the error message leaking system-specific permission details that are not relevant to the test. Rather than just directly addressing that portability problem, I took the opportunity to improve the error messages at the same time. However, because of that initial focus there are only actually tests here for the directory case. A test that tries to test any of these other file modes would not be portable and in some cases would require superuser access, so we'll just leave those cases untested for the moment since they weren't tested before anyway, and so we've not _lost_ any test coverage here. --- internal/lang/funcs/filesystem.go | 26 ++++++++++++++++++++++++-- internal/lang/funcs/filesystem_test.go | 4 ++-- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/internal/lang/funcs/filesystem.go b/internal/lang/funcs/filesystem.go index 01e090a5b..e5de7907c 100644 --- a/internal/lang/funcs/filesystem.go +++ b/internal/lang/funcs/filesystem.go @@ -226,8 +226,30 @@ func MakeFileExistsFunc(baseDir string) function.Function { return cty.True.WithMarks(pathMarks), nil } - return cty.False, fmt.Errorf("%s is not a regular file, but %q", - redactIfSensitive(path, pathMarks), fi.Mode().String()) + // The Go stat API only provides convenient access to whether it's + // a directory or not, so we need to do some bit fiddling to + // recognize other irregular file types. 
+ filename := redactIfSensitive(path, pathMarks) + fileType := fi.Mode().Type() + switch { + case (fileType & os.ModeDir) != 0: + err = function.NewArgErrorf(1, "%s is a directory, not a file", filename) + case (fileType & os.ModeDevice) != 0: + err = function.NewArgErrorf(1, "%s is a device node, not a regular file", filename) + case (fileType & os.ModeNamedPipe) != 0: + err = function.NewArgErrorf(1, "%s is a named pipe, not a regular file", filename) + case (fileType & os.ModeSocket) != 0: + err = function.NewArgErrorf(1, "%s is a unix domain socket, not a regular file", filename) + default: + // If it's not a type we recognize then we'll just return a + // generic error message. This should be very rare. + err = function.NewArgErrorf(1, "%s is not a regular file", filename) + + // Note: os.ModeSymlink should be impossible because we used + // os.Stat above, not os.Lstat. + } + + return cty.False, err }, }) } diff --git a/internal/lang/funcs/filesystem_test.go b/internal/lang/funcs/filesystem_test.go index 4e673984b..037137ae6 100644 --- a/internal/lang/funcs/filesystem_test.go +++ b/internal/lang/funcs/filesystem_test.go @@ -228,12 +228,12 @@ func TestFileExists(t *testing.T) { { cty.StringVal(""), cty.BoolVal(false), - `"." is not a regular file, but "drwxr-xr-x"`, + `"." 
is a directory, not a file`, }, { cty.StringVal("testdata").Mark(marks.Sensitive), cty.BoolVal(false), - `(sensitive value) is not a regular file, but "drwxr-xr-x"`, + `(sensitive value) is a directory, not a file`, }, { cty.StringVal("testdata/missing"), From 8d1bced8126eb7425168bac0df288d98b0483d91 Mon Sep 17 00:00:00 2001 From: Krista LaFentres Date: Fri, 7 Jan 2022 14:05:56 -0600 Subject: [PATCH 48/68] cli: Refactor show command to remove dependence on local run and only load the backend when we need it See https://github.com/hashicorp/terraform/pull/30205#issuecomment-997113175 for more context --- internal/command/show.go | 164 ++++++++++++++++++--------------------- 1 file changed, 74 insertions(+), 90 deletions(-) diff --git a/internal/command/show.go b/internal/command/show.go index 728ea9872..c3c88ebf6 100644 --- a/internal/command/show.go +++ b/internal/command/show.go @@ -11,10 +11,12 @@ import ( "github.com/hashicorp/terraform/internal/command/jsonplan" "github.com/hashicorp/terraform/internal/command/jsonstate" "github.com/hashicorp/terraform/internal/command/views" + "github.com/hashicorp/terraform/internal/configs" "github.com/hashicorp/terraform/internal/plans" "github.com/hashicorp/terraform/internal/plans/planfile" "github.com/hashicorp/terraform/internal/states/statefile" "github.com/hashicorp/terraform/internal/states/statemgr" + "github.com/hashicorp/terraform/internal/terraform" "github.com/hashicorp/terraform/internal/tfdiags" ) @@ -53,79 +55,17 @@ func (c *ShowCommand) Run(args []string) int { var diags tfdiags.Diagnostics - // Load the backend - b, backendDiags := c.Backend(nil) - diags = diags.Append(backendDiags) - if backendDiags.HasErrors() { - c.showDiagnostics(diags) - return 1 - } - - // We require a local backend - local, ok := b.(backend.Local) - if !ok { - c.showDiagnostics(diags) // in case of any warnings in here - c.Ui.Error(ErrUnsupportedLocalOp) - return 1 - } - - // This is a read-only command - 
c.ignoreRemoteVersionConflict(b) - - // the show command expects the config dir to always be the cwd - cwd, err := os.Getwd() - if err != nil { - c.Ui.Error(fmt.Sprintf("Error getting cwd: %s", err)) - return 1 - } - - // Determine if a planfile was passed to the command - var planFile *planfile.Reader - if len(args) > 0 { - // We will handle error checking later on - this is just required to - // load the local context if the given path is successfully read as - // a planfile. - planFile, _ = c.PlanFile(args[0]) - } - - // Build the operation - opReq := c.Operation(b) - opReq.ConfigDir = cwd - opReq.PlanFile = planFile - opReq.ConfigLoader, err = c.initConfigLoader() - opReq.AllowUnsetVariables = true - opReq.DisablePlanFileStateLineageChecks = true - if err != nil { - diags = diags.Append(err) - c.showDiagnostics(diags) - return 1 - } - - // Get the context - lr, _, ctxDiags := local.LocalRun(opReq) - diags = diags.Append(ctxDiags) - if ctxDiags.HasErrors() { - c.showDiagnostics(diags) - return 1 - } - - // Get the schemas from the context - schemas, moreDiags := lr.Core.Schemas(lr.Config, lr.InputState) - diags = diags.Append(moreDiags) - if moreDiags.HasErrors() { - c.showDiagnostics(diags) - return 1 - } - var planErr, stateErr error var plan *plans.Plan var stateFile *statefile.File + var config *configs.Config + var schemas *terraform.Schemas // if a path was provided, try to read it as a path to a planfile // if that fails, try to read the cli argument as a path to a statefile if len(args) > 0 { path := args[0] - plan, stateFile, planErr = getPlanFromPath(path) + plan, stateFile, config, planErr = getPlanFromPath(path) if planErr != nil { stateFile, stateErr = getStateFromPath(path) if stateErr != nil { @@ -140,21 +80,51 @@ func (c *ShowCommand) Run(args []string) int { } } } else { - env, err := c.Workspace() + // Load the backend + b, backendDiags := c.Backend(nil) + diags = diags.Append(backendDiags) + if backendDiags.HasErrors() { + 
c.showDiagnostics(diags) + return 1 + } + c.ignoreRemoteVersionConflict(b) + + workspace, err := c.Workspace() if err != nil { c.Ui.Error(fmt.Sprintf("Error selecting workspace: %s", err)) return 1 } - stateFile, stateErr = getStateFromEnv(b, env) + stateFile, stateErr = getStateFromBackend(b, workspace) if stateErr != nil { c.Ui.Error(stateErr.Error()) return 1 } } + if config != nil || stateFile != nil { + opts, err := c.contextOpts() + if err != nil { + diags = diags.Append(err) + c.showDiagnostics(diags) + return 1 + } + tfCtx, ctxDiags := terraform.NewContext(opts) + diags = diags.Append(ctxDiags) + if ctxDiags.HasErrors() { + c.showDiagnostics(diags) + return 1 + } + var schemaDiags tfdiags.Diagnostics + schemas, schemaDiags = tfCtx.Schemas(config, stateFile.State) + diags = diags.Append(schemaDiags) + if schemaDiags.HasErrors() { + c.showDiagnostics(diags) + return 1 + } + } + if plan != nil { if jsonOutput { - config := lr.Config jsonPlan, err := jsonplan.Marshal(config, plan, stateFile, schemas) if err != nil { @@ -215,52 +185,66 @@ func (c *ShowCommand) Synopsis() string { return "Show the current state or a saved plan" } -// getPlanFromPath returns a plan and statefile if the user-supplied path points -// to a planfile. If both plan and error are nil, the path is likely a -// directory. An error could suggest that the given path points to a statefile. -func getPlanFromPath(path string) (*plans.Plan, *statefile.File, error) { - pr, err := planfile.Open(path) +// getPlanFromPath returns a plan, statefile, and config if the user-supplied +// path points to a planfile. If both plan and error are nil, the path is likely +// a directory. An error could suggest that the given path points to a statefile. 
+func getPlanFromPath(path string) (*plans.Plan, *statefile.File, *configs.Config, error) { + planReader, err := planfile.Open(path) if err != nil { - return nil, nil, err - } - plan, err := pr.ReadPlan() - if err != nil { - return nil, nil, err + return nil, nil, nil, err } - stateFile, err := pr.ReadStateFile() - return plan, stateFile, err + // Get plan + plan, err := planReader.ReadPlan() + if err != nil { + return nil, nil, nil, err + } + + // Get statefile + stateFile, err := planReader.ReadStateFile() + if err != nil { + return nil, nil, nil, err + } + + // Get config + config, diags := planReader.ReadConfig() + if diags.HasErrors() { + return nil, nil, nil, diags.Err() + } + + return plan, stateFile, config, err } // getStateFromPath returns a statefile if the user-supplied path points to a statefile. func getStateFromPath(path string) (*statefile.File, error) { - f, err := os.Open(path) + file, err := os.Open(path) if err != nil { return nil, fmt.Errorf("Error loading statefile: %s", err) } - defer f.Close() + defer file.Close() var stateFile *statefile.File - stateFile, err = statefile.Read(f) + stateFile, err = statefile.Read(file) if err != nil { return nil, fmt.Errorf("Error reading %s as a statefile: %s", path, err) } return stateFile, nil } -// getStateFromEnv returns the State for the current workspace, if available. -func getStateFromEnv(b backend.Backend, env string) (*statefile.File, error) { - // Get the state - stateStore, err := b.StateMgr(env) +// getStateFromBackend returns the State for the current workspace, if available. 
+func getStateFromBackend(b backend.Backend, workspace string) (*statefile.File, error) { + // Get the state store for the given workspace + stateStore, err := b.StateMgr(workspace) if err != nil { return nil, fmt.Errorf("Failed to load state manager: %s", err) } + // Refresh the state store with the latest state snapshot from persistent storage if err := stateStore.RefreshState(); err != nil { return nil, fmt.Errorf("Failed to load state: %s", err) } - sf := statemgr.Export(stateStore) - - return sf, nil + // Get the latest state snapshot and return it + stateFile := statemgr.Export(stateStore) + return stateFile, nil } From fea8f6cfa220e3db34b30d8c21345aa0fa6f01e4 Mon Sep 17 00:00:00 2001 From: Krista LaFentres Date: Mon, 10 Jan 2022 17:16:12 -0600 Subject: [PATCH 49/68] cli: Migrate show command to use command arguments and views --- internal/command/arguments/show.go | 59 +++ internal/command/arguments/show_test.go | 99 ++++ internal/command/show.go | 256 +++++----- internal/command/show_test.go | 466 ++++++++++++------ .../show-json-state/empty/terraform.tfstate | 0 .../show-json-state/no-state/output.json | 3 + internal/command/views/show.go | 82 ++- internal/command/views/show_test.go | 184 +++++++ internal/command/views/testdata/show/main.tf | 3 + 9 files changed, 871 insertions(+), 281 deletions(-) create mode 100644 internal/command/arguments/show.go create mode 100644 internal/command/arguments/show_test.go create mode 100644 internal/command/testdata/show-json-state/empty/terraform.tfstate create mode 100644 internal/command/testdata/show-json-state/no-state/output.json create mode 100644 internal/command/views/show_test.go create mode 100644 internal/command/views/testdata/show/main.tf diff --git a/internal/command/arguments/show.go b/internal/command/arguments/show.go new file mode 100644 index 000000000..4d95fc1da --- /dev/null +++ b/internal/command/arguments/show.go @@ -0,0 +1,59 @@ +package arguments + +import ( + 
"github.com/hashicorp/terraform/internal/tfdiags" +) + +// Show represents the command-line arguments for the show command. +type Show struct { + // Path is the path to the state file or plan file to be displayed. If + // unspecified, show will display the latest state snapshot. + Path string + + // ViewType specifies which output format to use: human, JSON, or "raw". + ViewType ViewType +} + +// ParseShow processes CLI arguments, returning a Show value and errors. +// If errors are encountered, a Show value is still returned representing +// the best effort interpretation of the arguments. +func ParseShow(args []string) (*Show, tfdiags.Diagnostics) { + var diags tfdiags.Diagnostics + show := &Show{ + Path: "", + } + + var jsonOutput bool + cmdFlags := defaultFlagSet("show") + cmdFlags.BoolVar(&jsonOutput, "json", false, "json") + + if err := cmdFlags.Parse(args); err != nil { + diags = diags.Append(tfdiags.Sourceless( + tfdiags.Error, + "Failed to parse command-line flags", + err.Error(), + )) + } + + args = cmdFlags.Args() + if len(args) > 1 { + diags = diags.Append(tfdiags.Sourceless( + tfdiags.Error, + "Too many command line arguments", + "Expected at most one positional argument.", + )) + } + + if len(args) > 0 { + show.Path = args[0] + } + + switch { + case jsonOutput: + show.ViewType = ViewJSON + default: + show.ViewType = ViewHuman + } + + return show, diags +} diff --git a/internal/command/arguments/show_test.go b/internal/command/arguments/show_test.go new file mode 100644 index 000000000..5088e1a94 --- /dev/null +++ b/internal/command/arguments/show_test.go @@ -0,0 +1,99 @@ +package arguments + +import ( + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/hashicorp/terraform/internal/tfdiags" +) + +func TestParseShow_valid(t *testing.T) { + testCases := map[string]struct { + args []string + want *Show + }{ + "defaults": { + nil, + &Show{ + Path: "", + ViewType: ViewHuman, + }, + }, + "json": { + []string{"-json"}, + &Show{ + Path: 
"", + ViewType: ViewJSON, + }, + }, + "path": { + []string{"-json", "foo"}, + &Show{ + Path: "foo", + ViewType: ViewJSON, + }, + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + got, diags := ParseShow(tc.args) + if len(diags) > 0 { + t.Fatalf("unexpected diags: %v", diags) + } + if *got != *tc.want { + t.Fatalf("unexpected result\n got: %#v\nwant: %#v", got, tc.want) + } + }) + } +} + +func TestParseShow_invalid(t *testing.T) { + testCases := map[string]struct { + args []string + want *Show + wantDiags tfdiags.Diagnostics + }{ + "unknown flag": { + []string{"-boop"}, + &Show{ + Path: "", + ViewType: ViewHuman, + }, + tfdiags.Diagnostics{ + tfdiags.Sourceless( + tfdiags.Error, + "Failed to parse command-line flags", + "flag provided but not defined: -boop", + ), + }, + }, + "too many arguments": { + []string{"-json", "bar", "baz"}, + &Show{ + Path: "bar", + ViewType: ViewJSON, + }, + tfdiags.Diagnostics{ + tfdiags.Sourceless( + tfdiags.Error, + "Too many command line arguments", + "Expected at most one positional argument.", + ), + }, + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + got, gotDiags := ParseShow(tc.args) + if *got != *tc.want { + t.Fatalf("unexpected result\n got: %#v\nwant: %#v", got, tc.want) + } + if !reflect.DeepEqual(gotDiags, tc.wantDiags) { + t.Errorf("wrong result\ngot: %s\nwant: %s", spew.Sdump(gotDiags), spew.Sdump(tc.wantDiags)) + } + }) + } +} diff --git a/internal/command/show.go b/internal/command/show.go index c3c88ebf6..0b16ee735 100644 --- a/internal/command/show.go +++ b/internal/command/show.go @@ -7,9 +7,6 @@ import ( "github.com/hashicorp/terraform/internal/backend" "github.com/hashicorp/terraform/internal/command/arguments" - "github.com/hashicorp/terraform/internal/command/format" - "github.com/hashicorp/terraform/internal/command/jsonplan" - "github.com/hashicorp/terraform/internal/command/jsonstate" "github.com/hashicorp/terraform/internal/command/views" 
"github.com/hashicorp/terraform/internal/configs" "github.com/hashicorp/terraform/internal/plans" @@ -26,142 +23,40 @@ type ShowCommand struct { Meta } -func (c *ShowCommand) Run(args []string) int { - args = c.Meta.process(args) - cmdFlags := c.Meta.defaultFlagSet("show") - var jsonOutput bool - cmdFlags.BoolVar(&jsonOutput, "json", false, "produce JSON output") - cmdFlags.Usage = func() { c.Ui.Error(c.Help()) } - if err := cmdFlags.Parse(args); err != nil { - c.Ui.Error(fmt.Sprintf("Error parsing command-line flags: %s\n", err.Error())) +func (c *ShowCommand) Run(rawArgs []string) int { + // Parse and apply global view arguments + common, rawArgs := arguments.ParseView(rawArgs) + c.View.Configure(common) + + // Parse and validate flags + args, diags := arguments.ParseShow(rawArgs) + if diags.HasErrors() { + c.View.Diagnostics(diags) + c.View.HelpPrompt("show") return 1 } - args = cmdFlags.Args() - if len(args) > 2 { - c.Ui.Error( - "The show command expects at most two arguments.\n The path to a " + - "Terraform state or plan file, and optionally -json for json output.\n") - cmdFlags.Usage() - return 1 - } + // Set up view + view := views.NewShow(args.ViewType, c.View) // Check for user-supplied plugin path var err error if c.pluginPath, err = c.loadPluginPath(); err != nil { - c.Ui.Error(fmt.Sprintf("Error loading plugin path: %s", err)) + diags = diags.Append(fmt.Errorf("error loading plugin path: %s", err)) + view.Diagnostics(diags) return 1 } - var diags tfdiags.Diagnostics - - var planErr, stateErr error - var plan *plans.Plan - var stateFile *statefile.File - var config *configs.Config - var schemas *terraform.Schemas - - // if a path was provided, try to read it as a path to a planfile - // if that fails, try to read the cli argument as a path to a statefile - if len(args) > 0 { - path := args[0] - plan, stateFile, config, planErr = getPlanFromPath(path) - if planErr != nil { - stateFile, stateErr = getStateFromPath(path) - if stateErr != nil { - 
c.Ui.Error(fmt.Sprintf( - "Terraform couldn't read the given file as a state or plan file.\n"+ - "The errors while attempting to read the file as each format are\n"+ - "shown below.\n\n"+ - "State read error: %s\n\nPlan read error: %s", - stateErr, - planErr)) - return 1 - } - } - } else { - // Load the backend - b, backendDiags := c.Backend(nil) - diags = diags.Append(backendDiags) - if backendDiags.HasErrors() { - c.showDiagnostics(diags) - return 1 - } - c.ignoreRemoteVersionConflict(b) - - workspace, err := c.Workspace() - if err != nil { - c.Ui.Error(fmt.Sprintf("Error selecting workspace: %s", err)) - return 1 - } - stateFile, stateErr = getStateFromBackend(b, workspace) - if stateErr != nil { - c.Ui.Error(stateErr.Error()) - return 1 - } + // Get the data we need to display + plan, stateFile, config, schemas, showDiags := c.show(args.Path) + diags = diags.Append(showDiags) + if showDiags.HasErrors() { + view.Diagnostics(diags) + return 1 } - if config != nil || stateFile != nil { - opts, err := c.contextOpts() - if err != nil { - diags = diags.Append(err) - c.showDiagnostics(diags) - return 1 - } - tfCtx, ctxDiags := terraform.NewContext(opts) - diags = diags.Append(ctxDiags) - if ctxDiags.HasErrors() { - c.showDiagnostics(diags) - return 1 - } - var schemaDiags tfdiags.Diagnostics - schemas, schemaDiags = tfCtx.Schemas(config, stateFile.State) - diags = diags.Append(schemaDiags) - if schemaDiags.HasErrors() { - c.showDiagnostics(diags) - return 1 - } - } - - if plan != nil { - if jsonOutput { - jsonPlan, err := jsonplan.Marshal(config, plan, stateFile, schemas) - - if err != nil { - c.Ui.Error(fmt.Sprintf("Failed to marshal plan to json: %s", err)) - return 1 - } - c.Ui.Output(string(jsonPlan)) - return 0 - } - - view := views.NewShow(arguments.ViewHuman, c.View) - view.Plan(plan, schemas) - return 0 - } - - if jsonOutput { - // At this point, it is possible that there is neither state nor a plan. - // That's ok, we'll just return an empty object. 
- jsonState, err := jsonstate.Marshal(stateFile, schemas) - if err != nil { - c.Ui.Error(fmt.Sprintf("Failed to marshal state to json: %s", err)) - return 1 - } - c.Ui.Output(string(jsonState)) - } else { - if stateFile == nil { - c.Ui.Output("No state.") - return 0 - } - c.Ui.Output(format.State(&format.StateOpts{ - State: stateFile.State, - Color: c.Colorize(), - Schemas: schemas, - })) - } - - return 0 + // Display the data + return view.Display(config, plan, stateFile, schemas) } func (c *ShowCommand) Help() string { @@ -185,8 +80,113 @@ func (c *ShowCommand) Synopsis() string { return "Show the current state or a saved plan" } +func (c *ShowCommand) show(path string) (*plans.Plan, *statefile.File, *configs.Config, *terraform.Schemas, tfdiags.Diagnostics) { + var diags, showDiags tfdiags.Diagnostics + var plan *plans.Plan + var stateFile *statefile.File + var config *configs.Config + var schemas *terraform.Schemas + + // No plan file or state file argument provided, + // so get the latest state snapshot + if path == "" { + stateFile, showDiags = c.showFromLatestStateSnapshot() + diags = diags.Append(showDiags) + if showDiags.HasErrors() { + return plan, stateFile, config, schemas, diags + } + } + + // Plan file or state file argument provided, + // so try to load the argument as a plan file first. + // If that fails, try to load it as a statefile. 
+ if path != "" { + plan, stateFile, config, showDiags = c.showFromPath(path) + diags = diags.Append(showDiags) + if showDiags.HasErrors() { + return plan, stateFile, config, schemas, diags + } + } + + // Get schemas, if possible + if config != nil || stateFile != nil { + opts, err := c.contextOpts() + if err != nil { + diags = diags.Append(err) + return plan, stateFile, config, schemas, diags + } + tfCtx, ctxDiags := terraform.NewContext(opts) + diags = diags.Append(ctxDiags) + if ctxDiags.HasErrors() { + return plan, stateFile, config, schemas, diags + } + var schemaDiags tfdiags.Diagnostics + schemas, schemaDiags = tfCtx.Schemas(config, stateFile.State) + diags = diags.Append(schemaDiags) + if schemaDiags.HasErrors() { + return plan, stateFile, config, schemas, diags + } + } + + return plan, stateFile, config, schemas, diags +} +func (c *ShowCommand) showFromLatestStateSnapshot() (*statefile.File, tfdiags.Diagnostics) { + var diags tfdiags.Diagnostics + + // Load the backend + b, backendDiags := c.Backend(nil) + diags = diags.Append(backendDiags) + if backendDiags.HasErrors() { + return nil, diags + } + c.ignoreRemoteVersionConflict(b) + + // Load the workspace + workspace, err := c.Workspace() + if err != nil { + diags = diags.Append(fmt.Errorf("error selecting workspace: %s", err)) + return nil, diags + } + + // Get the latest state snapshot from the backend for the current workspace + stateFile, stateErr := getStateFromBackend(b, workspace) + if stateErr != nil { + diags = diags.Append(stateErr.Error()) + return nil, diags + } + + return stateFile, diags +} + +func (c *ShowCommand) showFromPath(path string) (*plans.Plan, *statefile.File, *configs.Config, tfdiags.Diagnostics) { + var diags tfdiags.Diagnostics + var planErr, stateErr error + var plan *plans.Plan + var stateFile *statefile.File + var config *configs.Config + + // Try to get the plan file and associated data from + // the path argument. 
If that fails, try to get the + // statefile from the path argument. + plan, stateFile, config, planErr = getPlanFromPath(path) + if planErr != nil { + stateFile, stateErr = getStateFromPath(path) + if stateErr != nil { + diags = diags.Append( + tfdiags.Sourceless( + tfdiags.Error, + "Failed to read the given file as a state or plan file", + fmt.Sprintf("State read error: %s\n\nPlan read error: %s", stateErr, planErr), + ), + ) + return nil, nil, nil, diags + } + } + return plan, stateFile, config, diags +} + // getPlanFromPath returns a plan, statefile, and config if the user-supplied -// path points to a planfile. If both plan and error are nil, the path is likely +// path points to a plan file. If both plan and error are nil, the path is likely // a directory. An error could suggest that the given path points to a statefile. func getPlanFromPath(path string) (*plans.Plan, *statefile.File, *configs.Config, error) { planReader, err := planfile.Open(path) diff --git a/internal/command/show_test.go b/internal/command/show_test.go index 25504d2c2..5f220e906 100644 --- a/internal/command/show_test.go +++ b/internal/command/show_test.go @@ -2,7 +2,6 @@ package command import ( "encoding/json" - "fmt" "io/ioutil" "os" "path/filepath" @@ -22,13 +21,11 @@ import ( "github.com/zclconf/go-cty/cty" ) -func TestShow(t *testing.T) { - ui := new(cli.MockUi) - view, _ := testView(t) +func TestShow_badArgs(t *testing.T) { + view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } @@ -36,40 +33,99 @@ func TestShow(t *testing.T) { args := []string{ "bad", "bad", + "-no-color", } - if code := c.Run(args); code != 1 { - t.Fatalf("bad: \n%s", ui.OutputWriter.String()) + + code := c.Run(args) + output := done(t) + + if code != 1 { + t.Fatalf("unexpected exit status %d; want 1\ngot: %s", code, output.Stdout()) } } -func TestShow_noArgs(t *testing.T) { +func TestShow_noArgsNoState(t *testing.T) { + 
view, done := testView(t) + c := &ShowCommand{ + Meta: Meta{ + testingOverrides: metaOverridesForProvider(testProvider()), + View: view, + }, + } + + code := c.Run([]string{}) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) + } + + got := output.Stdout() + want := `No state.` + if !strings.Contains(got, want) { + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) + } +} + +func TestShow_noArgsWithState(t *testing.T) { // Get a temp cwd tmp, cwd := testCwd(t) defer testFixCwd(t, tmp, cwd) // Create the default state testStateFileDefault(t, testState()) - ui := new(cli.MockUi) - view, _ := testView(t) + view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } - if code := c.Run([]string{}); code != 0 { - t.Fatalf("bad: \n%s", ui.OutputWriter.String()) + code := c.Run([]string{}) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } - if !strings.Contains(ui.OutputWriter.String(), "# test_instance.foo:") { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + got := output.Stdout() + want := `# test_instance.foo:` + if !strings.Contains(got, want) { + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) + } +} + +func TestShow_argsWithState(t *testing.T) { + // Create the default state + statePath := testStateFile(t, testState()) + stateDir := filepath.Dir(statePath) + defer os.RemoveAll(stateDir) + defer testChdir(t, stateDir)() + + view, done := testView(t) + c := &ShowCommand{ + Meta: Meta{ + testingOverrides: metaOverridesForProvider(testProvider()), + View: view, + }, + } + + path := filepath.Base(statePath) + args := []string{ + path, + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } } // 
https://github.com/hashicorp/terraform/issues/21462 -func TestShow_aliasedProvider(t *testing.T) { +func TestShow_argsWithStateAliasedProvider(t *testing.T) { // Create the default state with aliased resource testState := states.BuildState(func(s *states.SyncState) { s.SetResourceInstanceCurrent( @@ -95,103 +151,198 @@ func TestShow_aliasedProvider(t *testing.T) { defer os.RemoveAll(stateDir) defer testChdir(t, stateDir)() - ui := new(cli.MockUi) - view, _ := testView(t) + view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } - // the statefile created by testStateFile is named state.tfstate - args := []string{"state.tfstate"} - if code := c.Run(args); code != 0 { - t.Fatalf("bad exit code: \n%s", ui.OutputWriter.String()) + path := filepath.Base(statePath) + args := []string{ + path, + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } - if strings.Contains(ui.OutputWriter.String(), "# missing schema for provider \"test.alias\"") { - t.Fatalf("bad output: \n%s", ui.OutputWriter.String()) + got := output.Stdout() + want := `# missing schema for provider \"test.alias\"` + if strings.Contains(got, want) { + t.Fatalf("unexpected output\ngot: %s", got) } } -func TestShow_noArgsNoState(t *testing.T) { - // Create the default state - statePath := testStateFile(t, testState()) - stateDir := filepath.Dir(statePath) - defer os.RemoveAll(stateDir) - defer testChdir(t, stateDir)() - - ui := new(cli.MockUi) - view, _ := testView(t) +func TestShow_argsPlanFileDoesNotExist(t *testing.T) { + view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } - // the statefile created by testStateFile is named state.tfstate - args := []string{"state.tfstate"} - if code := c.Run(args); code != 0 { - 
t.Fatalf("bad: \n%s", ui.OutputWriter.String()) + args := []string{ + "doesNotExist.tfplan", + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 1 { + t.Fatalf("unexpected exit status %d; want 1\ngot: %s", code, output.Stdout()) + } + + got := output.Stderr() + want := `Plan read error: open doesNotExist.tfplan:` + if !strings.Contains(got, want) { + t.Errorf("unexpected output\ngot: %s\nwant:\n%s", got, want) + } +} + +func TestShow_argsStatefileDoesNotExist(t *testing.T) { + view, done := testView(t) + c := &ShowCommand{ + Meta: Meta{ + testingOverrides: metaOverridesForProvider(testProvider()), + View: view, + }, + } + + args := []string{ + "doesNotExist.tfstate", + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 1 { + t.Fatalf("unexpected exit status %d; want 1\ngot: %s", code, output.Stdout()) + } + + got := output.Stderr() + want := `State read error: Error loading statefile:` + if !strings.Contains(got, want) { + t.Errorf("unexpected output\ngot: %s\nwant:\n%s", got, want) + } +} + +func TestShow_json_argsPlanFileDoesNotExist(t *testing.T) { + view, done := testView(t) + c := &ShowCommand{ + Meta: Meta{ + testingOverrides: metaOverridesForProvider(testProvider()), + View: view, + }, + } + + args := []string{ + "-json", + "doesNotExist.tfplan", + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 1 { + t.Fatalf("unexpected exit status %d; want 1\ngot: %s", code, output.Stdout()) + } + + got := output.Stderr() + want := `Plan read error: open doesNotExist.tfplan:` + if !strings.Contains(got, want) { + t.Errorf("unexpected output\ngot: %s\nwant:\n%s", got, want) + } +} + +func TestShow_json_argsStatefileDoesNotExist(t *testing.T) { + view, done := testView(t) + c := &ShowCommand{ + Meta: Meta{ + testingOverrides: metaOverridesForProvider(testProvider()), + View: view, + }, + } + + args := []string{ + "-json", + "doesNotExist.tfstate", + "-no-color", + } + code := c.Run(args) + output := 
done(t) + + if code != 1 { + t.Fatalf("unexpected exit status %d; want 1\ngot: %s", code, output.Stdout()) + } + + got := output.Stderr() + want := `State read error: Error loading statefile:` + if !strings.Contains(got, want) { + t.Errorf("unexpected output\ngot: %s\nwant:\n%s", got, want) } } func TestShow_planNoop(t *testing.T) { planPath := testPlanFileNoop(t) - ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } args := []string{ planPath, + "-no-color", } - if code := c.Run(args); code != 0 { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } + got := output.Stdout() want := `No changes. Your infrastructure matches the configuration.` - got := done(t).Stdout() if !strings.Contains(got, want) { - t.Errorf("missing expected output\nwant: %s\ngot:\n%s", want, got) + t.Errorf("unexpected output\ngot: %s\nwant:\n%s", got, want) } } func TestShow_planWithChanges(t *testing.T) { planPathWithChanges := showFixturePlanFile(t, plans.DeleteThenCreate) - ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(showFixtureProvider()), - Ui: ui, View: view, }, } args := []string{ planPathWithChanges, + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } - if code := c.Run(args); code != 0 { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) - } - + got := output.Stdout() want := `test_instance.foo must be replaced` - got := done(t).Stdout() if !strings.Contains(got, want) { - t.Errorf("missing expected output\nwant: %s\ngot:\n%s", want, got) + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) } } @@ -239,30 +390,34 @@ func 
TestShow_planWithForceReplaceChange(t *testing.T) { plan, ) - ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(showFixtureProvider()), - Ui: ui, View: view, }, } args := []string{ planFilePath, + "-no-color", + } + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } - if code := c.Run(args); code != 0 { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + got := output.Stdout() + want := `test_instance.foo will be replaced, as requested` + if !strings.Contains(got, want) { + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) } - got := done(t).Stdout() - if want := `test_instance.foo will be replaced, as requested`; !strings.Contains(got, want) { - t.Errorf("wrong output\ngot:\n%s\n\nwant substring: %s", got, want) - } - if want := `Plan: 1 to add, 0 to change, 1 to destroy.`; !strings.Contains(got, want) { - t.Errorf("wrong output\ngot:\n%s\n\nwant substring: %s", got, want) + want = `Plan: 1 to add, 0 to change, 1 to destroy.` + if !strings.Contains(got, want) { + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) } } @@ -270,12 +425,10 @@ func TestShow_planWithForceReplaceChange(t *testing.T) { func TestShow_plan_json(t *testing.T) { planPath := showFixturePlanFile(t, plans.Create) - ui := new(cli.MockUi) - view, _ := testView(t) + view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(showFixtureProvider()), - Ui: ui, View: view, }, } @@ -283,9 +436,13 @@ func TestShow_plan_json(t *testing.T) { args := []string{ "-json", planPath, + "-no-color", } - if code := c.Run(args); code != 0 { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } } @@ -294,21 +451,23 @@ func TestShow_state(t *testing.T) { statePath 
:= testStateFile(t, originalState) defer os.RemoveAll(filepath.Dir(statePath)) - ui := new(cli.MockUi) - view, _ := testView(t) + view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } args := []string{ statePath, + "-no-color", } - if code := c.Run(args); code != 0 { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } } @@ -339,18 +498,15 @@ func TestShow_json_output(t *testing.T) { defer close() p := showFixtureProvider() - ui := new(cli.MockUi) - view, _ := testView(t) - m := Meta{ - testingOverrides: metaOverridesForProvider(p), - Ui: ui, - View: view, - ProviderSource: providerSource, - } // init + ui := new(cli.MockUi) ic := &InitCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + Ui: ui, + ProviderSource: providerSource, + }, } if code := ic.Run([]string{}); code != 0 { if expectError { @@ -360,22 +516,35 @@ func TestShow_json_output(t *testing.T) { t.Fatalf("init failed\n%s", ui.ErrorWriter) } + // plan + planView, planDone := testView(t) pc := &PlanCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + View: planView, + ProviderSource: providerSource, + }, } args := []string{ "-out=terraform.plan", } - if code := pc.Run(args); code != 0 { - t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) + code := pc.Run(args) + planOutput := planDone(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, planOutput.Stderr()) } - // flush the plan output from the mock ui - ui.OutputWriter.Reset() + // show + showView, showDone := testView(t) sc := &ShowCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + View: showView, + ProviderSource: providerSource, + }, } args = []string{ @@ -383,25 
+552,27 @@ func TestShow_json_output(t *testing.T) { "terraform.plan", } defer os.Remove("terraform.plan") + code = sc.Run(args) + showOutput := showDone(t) - if code := sc.Run(args); code != 0 { - t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, showOutput.Stderr()) } - // compare ui output to wanted output + // compare view output to wanted output var got, want plan - gotString := ui.OutputWriter.String() + gotString := showOutput.Stdout() json.Unmarshal([]byte(gotString), &got) wantFile, err := os.Open("output.json") if err != nil { - t.Fatalf("err: %s", err) + t.Fatalf("unexpected err: %s", err) } defer wantFile.Close() byteValue, err := ioutil.ReadAll(wantFile) if err != nil { - t.Fatalf("err: %s", err) + t.Fatalf("unexpected err: %s", err) } json.Unmarshal([]byte(byteValue), &want) @@ -423,43 +594,48 @@ func TestShow_json_output_sensitive(t *testing.T) { defer close() p := showFixtureSensitiveProvider() - ui := new(cli.MockUi) - view, _ := testView(t) - m := Meta{ - testingOverrides: metaOverridesForProvider(p), - Ui: ui, - View: view, - ProviderSource: providerSource, - } // init + ui := new(cli.MockUi) ic := &InitCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + Ui: ui, + ProviderSource: providerSource, + }, } if code := ic.Run([]string{}); code != 0 { t.Fatalf("init failed\n%s", ui.ErrorWriter) } - // flush init output - ui.OutputWriter.Reset() - + // plan + planView, planDone := testView(t) pc := &PlanCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + View: planView, + ProviderSource: providerSource, + }, } args := []string{ "-out=terraform.plan", } + code := pc.Run(args) + planOutput := planDone(t) - if code := pc.Run(args); code != 0 { - fmt.Println(ui.OutputWriter.String()) - t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) + if code != 
0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, planOutput.Stderr()) } - // flush the plan output from the mock ui - ui.OutputWriter.Reset() + // show + showView, showDone := testView(t) sc := &ShowCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + View: showView, + ProviderSource: providerSource, + }, } args = []string{ @@ -467,25 +643,27 @@ func TestShow_json_output_sensitive(t *testing.T) { "terraform.plan", } defer os.Remove("terraform.plan") + code = sc.Run(args) + showOutput := showDone(t) - if code := sc.Run(args); code != 0 { - t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, showOutput.Stderr()) } // compare ui output to wanted output var got, want plan - gotString := ui.OutputWriter.String() + gotString := showOutput.Stdout() json.Unmarshal([]byte(gotString), &got) wantFile, err := os.Open("output.json") if err != nil { - t.Fatalf("err: %s", err) + t.Fatalf("unexpected err: %s", err) } defer wantFile.Close() byteValue, err := ioutil.ReadAll(wantFile) if err != nil { - t.Fatalf("err: %s", err) + t.Fatalf("unexpected err: %s", err) } json.Unmarshal([]byte(byteValue), &want) @@ -520,31 +698,35 @@ func TestShow_json_output_state(t *testing.T) { defer close() p := showFixtureProvider() - ui := new(cli.MockUi) - view, _ := testView(t) - m := Meta{ - testingOverrides: metaOverridesForProvider(p), - Ui: ui, - View: view, - ProviderSource: providerSource, - } // init + ui := new(cli.MockUi) ic := &InitCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: metaOverridesForProvider(p), + Ui: ui, + ProviderSource: providerSource, + }, } if code := ic.Run([]string{}); code != 0 { t.Fatalf("init failed\n%s", ui.ErrorWriter) } - // flush the plan output from the mock ui - ui.OutputWriter.Reset() + // show + showView, showDone := testView(t) sc := &ShowCommand{ - Meta: m, + Meta: Meta{ + testingOverrides: 
metaOverridesForProvider(p), + View: showView, + ProviderSource: providerSource, + }, } - if code := sc.Run([]string{"-json"}); code != 0 { - t.Fatalf("wrong exit status %d; want 0\nstderr: %s", code, ui.ErrorWriter.String()) + code := sc.Run([]string{"-json"}) + showOutput := showDone(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, showOutput.Stderr()) } // compare ui output to wanted output @@ -556,17 +738,17 @@ func TestShow_json_output_state(t *testing.T) { } var got, want state - gotString := ui.OutputWriter.String() + gotString := showOutput.Stdout() json.Unmarshal([]byte(gotString), &got) wantFile, err := os.Open("output.json") if err != nil { - t.Fatalf("err: %s", err) + t.Fatalf("unexpected error: %s", err) } defer wantFile.Close() byteValue, err := ioutil.ReadAll(wantFile) if err != nil { - t.Fatalf("err: %s", err) + t.Fatalf("unexpected err: %s", err) } json.Unmarshal([]byte(byteValue), &want) @@ -599,27 +781,29 @@ func TestShow_planWithNonDefaultStateLineage(t *testing.T) { } planPath := testPlanFileMatchState(t, snap, state, plan, stateMeta) - ui := cli.NewMockUi() view, done := testView(t) c := &ShowCommand{ Meta: Meta{ testingOverrides: metaOverridesForProvider(testProvider()), - Ui: ui, View: view, }, } args := []string{ planPath, + "-no-color", } - if code := c.Run(args); code != 0 { - t.Fatalf("bad: \n%s", ui.ErrorWriter.String()) + code := c.Run(args) + output := done(t) + + if code != 0 { + t.Fatalf("unexpected exit status %d; want 0\ngot: %s", code, output.Stderr()) } + got := output.Stdout() want := `No changes. 
Your infrastructure matches the configuration.` - got := done(t).Stdout() if !strings.Contains(got, want) { - t.Errorf("missing expected output\nwant: %s\ngot:\n%s", want, got) + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) } } diff --git a/internal/command/testdata/show-json-state/empty/terraform.tfstate b/internal/command/testdata/show-json-state/empty/terraform.tfstate new file mode 100644 index 000000000..e69de29bb diff --git a/internal/command/testdata/show-json-state/no-state/output.json b/internal/command/testdata/show-json-state/no-state/output.json new file mode 100644 index 000000000..381450cad --- /dev/null +++ b/internal/command/testdata/show-json-state/no-state/output.json @@ -0,0 +1,3 @@ +{ + "format_version": "1.0" +} diff --git a/internal/command/views/show.go b/internal/command/views/show.go index bfc45a4ce..1ab16c2d5 100644 --- a/internal/command/views/show.go +++ b/internal/command/views/show.go @@ -2,37 +2,95 @@ package views import ( "fmt" - "github.com/hashicorp/terraform/internal/command/arguments" + "github.com/hashicorp/terraform/internal/command/format" + "github.com/hashicorp/terraform/internal/command/jsonplan" + "github.com/hashicorp/terraform/internal/command/jsonstate" + "github.com/hashicorp/terraform/internal/configs" "github.com/hashicorp/terraform/internal/plans" + "github.com/hashicorp/terraform/internal/states/statefile" "github.com/hashicorp/terraform/internal/terraform" + "github.com/hashicorp/terraform/internal/tfdiags" ) -// FIXME: this is a temporary partial definition of the view for the show -// command, in place to allow access to the plan renderer which is now in the -// views package. type Show interface { - Plan(plan *plans.Plan, schemas *terraform.Schemas) + // Display renders the plan, if it is available. If plan is nil, it renders the statefile. 
+ Display(config *configs.Config, plan *plans.Plan, stateFile *statefile.File, schemas *terraform.Schemas) int + + // Diagnostics renders early diagnostics, resulting from argument parsing. + Diagnostics(diags tfdiags.Diagnostics) } -// FIXME: the show view should support both human and JSON types. This code is -// currently only used to render the plan in human-readable UI, so does not yet -// support JSON. func NewShow(vt arguments.ViewType, view *View) Show { switch vt { + case arguments.ViewJSON: + return &ShowJSON{view: view} case arguments.ViewHuman: - return &ShowHuman{View: *view} + return &ShowHuman{view: view} default: panic(fmt.Sprintf("unknown view type %v", vt)) } } type ShowHuman struct { - View + view *View } var _ Show = (*ShowHuman)(nil) -func (v *ShowHuman) Plan(plan *plans.Plan, schemas *terraform.Schemas) { - renderPlan(plan, schemas, &v.View) +func (v *ShowHuman) Display(config *configs.Config, plan *plans.Plan, stateFile *statefile.File, schemas *terraform.Schemas) int { + if plan != nil { + renderPlan(plan, schemas, v.view) + } else { + if stateFile == nil { + v.view.streams.Println("No state.") + return 0 + } + + v.view.streams.Println(format.State(&format.StateOpts{ + State: stateFile.State, + Color: v.view.colorize, + Schemas: schemas, + })) + } + return 0 +} + +func (v *ShowHuman) Diagnostics(diags tfdiags.Diagnostics) { + v.view.Diagnostics(diags) +} + +type ShowJSON struct { + view *View +} + +var _ Show = (*ShowJSON)(nil) + +func (v *ShowJSON) Display(config *configs.Config, plan *plans.Plan, stateFile *statefile.File, schemas *terraform.Schemas) int { + if plan != nil { + jsonPlan, err := jsonplan.Marshal(config, plan, stateFile, schemas) + + if err != nil { + v.view.streams.Eprintf("Failed to marshal plan to json: %s", err) + return 1 + } + v.view.streams.Println(string(jsonPlan)) + } else { + // It is possible that there is neither state nor a plan. + // That's ok, we'll just return an empty object. 
+ jsonState, err := jsonstate.Marshal(stateFile, schemas) + if err != nil { + v.view.streams.Eprintf("Failed to marshal state to json: %s", err) + return 1 + } + v.view.streams.Println(string(jsonState)) + } + return 0 +} + +// Diagnostics should only be called if show cannot be executed. +// In this case, we choose to render human-readable diagnostic output, +// primarily for backwards compatibility. +func (v *ShowJSON) Diagnostics(diags tfdiags.Diagnostics) { + v.view.Diagnostics(diags) } diff --git a/internal/command/views/show_test.go b/internal/command/views/show_test.go new file mode 100644 index 000000000..fe69130d9 --- /dev/null +++ b/internal/command/views/show_test.go @@ -0,0 +1,184 @@ +package views + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/hashicorp/terraform/internal/addrs" + "github.com/hashicorp/terraform/internal/command/arguments" + "github.com/hashicorp/terraform/internal/configs/configschema" + "github.com/hashicorp/terraform/internal/initwd" + "github.com/hashicorp/terraform/internal/plans" + "github.com/hashicorp/terraform/internal/states" + "github.com/hashicorp/terraform/internal/states/statefile" + "github.com/hashicorp/terraform/internal/terminal" + "github.com/hashicorp/terraform/internal/terraform" + + "github.com/zclconf/go-cty/cty" +) + +func TestShowHuman(t *testing.T) { + testCases := map[string]struct { + plan *plans.Plan + stateFile *statefile.File + schemas *terraform.Schemas + wantExact bool + wantString string + }{ + "plan file": { + testPlan(t), + nil, + testSchemas(), + false, + "# test_resource.foo will be created", + }, + "statefile": { + nil, + &statefile.File{ + Serial: 0, + Lineage: "fake-for-testing", + State: testState(), + }, + testSchemas(), + false, + "# test_resource.foo:", + }, + "empty statefile": { + nil, + &statefile.File{ + Serial: 0, + Lineage: "fake-for-testing", + State: states.NewState(), + }, + testSchemas(), + true, + "\n", + }, + "nothing": { + nil, + nil, + nil, + true, + "No 
state.\n", + }, + } + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + streams, done := terminal.StreamsForTesting(t) + view := NewView(streams) + view.Configure(&arguments.View{NoColor: true}) + v := NewShow(arguments.ViewHuman, view) + + code := v.Display(nil, testCase.plan, testCase.stateFile, testCase.schemas) + if code != 0 { + t.Errorf("expected 0 return code, got %d", code) + } + + output := done(t) + got := output.Stdout() + want := testCase.wantString + if (testCase.wantExact && got != want) || (!testCase.wantExact && !strings.Contains(got, want)) { + t.Fatalf("unexpected output\ngot: %s\nwant: %s", got, want) + } + }) + } +} + +func TestShowJSON(t *testing.T) { + testCases := map[string]struct { + plan *plans.Plan + stateFile *statefile.File + }{ + "plan file": { + testPlan(t), + nil, + }, + "statefile": { + nil, + &statefile.File{ + Serial: 0, + Lineage: "fake-for-testing", + State: testState(), + }, + }, + "empty statefile": { + nil, + &statefile.File{ + Serial: 0, + Lineage: "fake-for-testing", + State: states.NewState(), + }, + }, + "nothing": { + nil, + nil, + }, + } + + config, _, configCleanup := initwd.MustLoadConfigForTests(t, "./testdata/show") + defer configCleanup() + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + streams, done := terminal.StreamsForTesting(t) + view := NewView(streams) + view.Configure(&arguments.View{NoColor: true}) + v := NewShow(arguments.ViewJSON, view) + + schemas := &terraform.Schemas{ + Providers: map[addrs.Provider]*terraform.ProviderSchema{ + addrs.NewDefaultProvider("test"): { + ResourceTypes: map[string]*configschema.Block{ + "test_resource": { + Attributes: map[string]*configschema.Attribute{ + "id": {Type: cty.String, Optional: true, Computed: true}, + "foo": {Type: cty.String, Optional: true}, + }, + }, + }, + }, + }, + } + + code := v.Display(config, testCase.plan, testCase.stateFile, schemas) + + if code != 0 { + t.Errorf("expected 0 return code, got 
%d", code) + } + + // Make sure the result looks like JSON; we comprehensively test + // the structure of this output in the command package tests. + var result map[string]interface{} + got := done(t).All() + t.Logf("output: %s", got) + if err := json.Unmarshal([]byte(got), &result); err != nil { + t.Fatal(err) + } + }) + } +} + +// testState returns a test State structure. +func testState() *states.State { + return states.BuildState(func(s *states.SyncState) { + s.SetResourceInstanceCurrent( + addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "test_resource", + Name: "foo", + }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), + &states.ResourceInstanceObjectSrc{ + AttrsJSON: []byte(`{"id":"bar","foo":"value"}`), + Status: states.ObjectReady, + }, + addrs.AbsProviderConfig{ + Provider: addrs.NewDefaultProvider("test"), + Module: addrs.RootModule, + }, + ) + // DeepCopy is used here to ensure our synthetic state matches exactly + // with a state that will have been copied during the command + // operation, and all fields have been copied correctly. + }).DeepCopy() +} diff --git a/internal/command/views/testdata/show/main.tf b/internal/command/views/testdata/show/main.tf new file mode 100644 index 000000000..e1cca23dd --- /dev/null +++ b/internal/command/views/testdata/show/main.tf @@ -0,0 +1,3 @@ +resource "test_resource" "foo" { + foo = "value" +} From 64e1241ae34013d4db1582ea2caca5c37609418f Mon Sep 17 00:00:00 2001 From: Krista LaFentres Date: Tue, 11 Jan 2022 16:53:36 -0600 Subject: [PATCH 50/68] backend/local: Remove unused DisablePlanFileStateLineageChecks flag Now that show command has been refactored to remove its dependence on a local backend and local run, this flag is no longer needed to fix #30195. 
--- internal/backend/backend.go | 7 ------- internal/backend/local/backend_local.go | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/internal/backend/backend.go b/internal/backend/backend.go index 0e1daef40..4124b2abd 100644 --- a/internal/backend/backend.go +++ b/internal/backend/backend.go @@ -275,13 +275,6 @@ type Operation struct { // the variables set in the plan are used instead, and they must be valid. AllowUnsetVariables bool - // When loading a plan file for a read-only operation, we may want to - // disable the state lineage checks which are only relevant for operations - // which can modify state. An example where this is important is showing - // a plan which was prepared against a non-default state file, because the - // lineage checks are always against the default state. - DisablePlanFileStateLineageChecks bool - // View implements the logic for all UI interactions. View views.Operation diff --git a/internal/backend/local/backend_local.go b/internal/backend/local/backend_local.go index a4a4fb67e..6082bfdf6 100644 --- a/internal/backend/local/backend_local.go +++ b/internal/backend/local/backend_local.go @@ -284,7 +284,7 @@ func (b *Local) localRunForPlanFile(op *backend.Operation, pf *planfile.Reader, )) return nil, snap, diags } - if !op.DisablePlanFileStateLineageChecks && currentStateMeta != nil { + if currentStateMeta != nil { // If the caller sets this, we require that the stored prior state // has the same metadata, which is an extra safety check that nothing // has changed since the plan was created. 
(All of the "real-world" From 08fb9078ea5c05d1e18a6169161c21e56ce3cc24 Mon Sep 17 00:00:00 2001 From: yuriy Date: Mon, 17 Jan 2022 09:09:02 +0100 Subject: [PATCH 51/68] Update aws-sdk-go dependency to support new region (Jakarta) --- go.mod | 4 ++-- go.sum | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 194b60ca7..2cbdb0fc0 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/apparentlymart/go-userdirs v0.0.0-20200915174352-b0c018a67c13 github.com/apparentlymart/go-versions v1.0.1 github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2 - github.com/aws/aws-sdk-go v1.40.25 + github.com/aws/aws-sdk-go v1.42.35 github.com/bgentry/speakeasy v0.1.0 github.com/bmatcuk/doublestar v1.1.5 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e @@ -84,7 +84,7 @@ require ( go.etcd.io/etcd v0.5.0-alpha.5.0.20210428180535-15715dcf1ace golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa golang.org/x/mod v0.4.2 - golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d + golang.org/x/net v0.0.0-20211216030914-fe4d6282115f golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d diff --git a/go.sum b/go.sum index 8ef3722b7..60c55b149 100644 --- a/go.sum +++ b/go.sum @@ -153,6 +153,8 @@ github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN github.com/aws/aws-sdk-go v1.31.9/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/aws/aws-sdk-go v1.40.25 h1:Depnx7O86HWgOCLD5nMto6F9Ju85Q1QuFDnbpZYQWno= github.com/aws/aws-sdk-go v1.40.25/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= +github.com/aws/aws-sdk-go v1.42.35 h1:N4N9buNs4YlosI9N0+WYrq8cIZwdgv34yRbxzZlTvFs= +github.com/aws/aws-sdk-go v1.42.35/go.mod h1:OGr6lGMAKGlG9CVrYnWYDKIyb829c6EVBRjxqjmPepc= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f 
h1:ZNv7On9kyUzm7fvRZumSyy/IUiSC7AzL0I1jKKtwooA= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -842,6 +844,8 @@ golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211216030914-fe4d6282115f h1:hEYJvxw1lSnWIl8X9ofsYMklzaDs90JI2az5YMd4fPM= +golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= From d4ebed4c09a9f744bef40f5340509ad5b851438e Mon Sep 17 00:00:00 2001 From: yuriy Date: Mon, 17 Jan 2022 23:08:39 +0100 Subject: [PATCH 52/68] Updates from 'go mod tidy' --- go.sum | 4 ---- 1 file changed, 4 deletions(-) diff --git a/go.sum b/go.sum index 60c55b149..447f84bbb 100644 --- a/go.sum +++ b/go.sum @@ -151,8 +151,6 @@ github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:l github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.31.9/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= -github.com/aws/aws-sdk-go v1.40.25 
h1:Depnx7O86HWgOCLD5nMto6F9Ju85Q1QuFDnbpZYQWno= -github.com/aws/aws-sdk-go v1.40.25/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= github.com/aws/aws-sdk-go v1.42.35 h1:N4N9buNs4YlosI9N0+WYrq8cIZwdgv34yRbxzZlTvFs= github.com/aws/aws-sdk-go v1.42.35/go.mod h1:OGr6lGMAKGlG9CVrYnWYDKIyb829c6EVBRjxqjmPepc= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f h1:ZNv7On9kyUzm7fvRZumSyy/IUiSC7AzL0I1jKKtwooA= @@ -841,8 +839,6 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211216030914-fe4d6282115f h1:hEYJvxw1lSnWIl8X9ofsYMklzaDs90JI2az5YMd4fPM= golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= From fc4ceedc6d2b5d2a93511e5dadd442c984044745 Mon Sep 17 00:00:00 2001 From: Laura Pacilio <83350965+laurapacilio@users.noreply.github.com> Date: Wed, 5 Jan 2022 11:08:33 -0500 Subject: [PATCH 53/68] Remove overview page and update notes on provisioner pages --- .../language/resources/provisioners/chef.mdx | 3 ++- .../resources/provisioners/connection.mdx | 27 ++++++++++--------- .../language/resources/provisioners/file.mdx | 4 +-- .../resources/provisioners/habitat.mdx | 3 ++- .../language/resources/provisioners/index.mdx | 12 --------- .../resources/provisioners/local-exec.mdx | 4 +-- 
.../resources/provisioners/null_resource.mdx | 4 +++ .../resources/provisioners/puppet.mdx | 3 ++- .../resources/provisioners/remote-exec.mdx | 6 ++--- .../provisioners/salt-masterless.mdx | 3 ++- 10 files changed, 34 insertions(+), 35 deletions(-) delete mode 100644 website/docs/language/resources/provisioners/index.mdx diff --git a/website/docs/language/resources/provisioners/chef.mdx b/website/docs/language/resources/provisioners/chef.mdx index 9ec4887e2..99419c11e 100644 --- a/website/docs/language/resources/provisioners/chef.mdx +++ b/website/docs/language/resources/provisioners/chef.mdx @@ -11,7 +11,8 @@ The `chef` provisioner installs, configures and runs the Chef Client on a remote resource. The `chef` provisioner supports both `ssh` and `winrm` type [connections](/language/resources/provisioners/connection). -!> **Note:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. For more information, see [the main Provisioners page](/language/resources/provisioners). +!> **Warning:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. ## Requirements diff --git a/website/docs/language/resources/provisioners/connection.mdx b/website/docs/language/resources/provisioners/connection.mdx index eb1ccff5f..22a4c16a0 100644 --- a/website/docs/language/resources/provisioners/connection.mdx +++ b/website/docs/language/resources/provisioners/connection.mdx @@ -11,19 +11,10 @@ Most provisioners require access to the remote resource via SSH or WinRM, and expect a nested `connection` block with details about how to connect. -> **Note:** Provisioners should only be used as a last resort. 
For most -common situations there are better alternatives. For more information, see -[the main Provisioners page](/language/resources/provisioners). +common situations there are better alternatives. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. --> **Note:** In Terraform 0.11 and earlier, providers could set default values -for some connection settings, so that `connection` blocks could sometimes be -omitted. This feature was removed in 0.12 in order to make Terraform's behavior -more predictable. - --> **Note:** Since the SSH connection type is most often used with -newly-created remote resources, validation of SSH host keys is disabled by -default. In scenarios where this is not acceptable, a separate mechanism for -key distribution could be established and the `host_key` directive documented -below explicitly set to verify against a specific key or signing CA. +## Connection Block Connection blocks don't take a block label, and can be nested within either a `resource` or a `provisioner`. @@ -37,6 +28,18 @@ One use case for providing multiple connections is to have an initial provisioner connect as the `root` user to set up user accounts, and have subsequent provisioners connect as a user with more limited permissions. +Since the SSH connection type is most often used with +newly-created remote resources, validation of SSH host keys is disabled by +default. In scenarios where this is not acceptable, a separate mechanism for +key distribution could be established and the `host_key` directive documented +below explicitly set to verify against a specific key or signing CA. + +-> **Note:** In Terraform 0.11 and earlier, providers could set default values +for some connection settings, so that `connection` blocks could sometimes be +omitted. This feature was removed in 0.12 in order to make Terraform's behavior +more predictable. 
+ + ## Example usage ```hcl diff --git a/website/docs/language/resources/provisioners/file.mdx b/website/docs/language/resources/provisioners/file.mdx index 83dfb7a90..255814cd7 100644 --- a/website/docs/language/resources/provisioners/file.mdx +++ b/website/docs/language/resources/provisioners/file.mdx @@ -13,8 +13,8 @@ executing Terraform to the newly created resource. The `file` provisioner supports both `ssh` and `winrm` type [connections](/language/resources/provisioners/connection). -> **Note:** Provisioners should only be used as a last resort. For most -common situations there are better alternatives. For more information, see -[the main Provisioners page](/language/resources/provisioners). +common situations there are better alternatives. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. ## Example usage diff --git a/website/docs/language/resources/provisioners/habitat.mdx b/website/docs/language/resources/provisioners/habitat.mdx index c8ab535d1..cdd8f8bf9 100644 --- a/website/docs/language/resources/provisioners/habitat.mdx +++ b/website/docs/language/resources/provisioners/habitat.mdx @@ -9,7 +9,8 @@ description: >- The `habitat` provisioner installs the [Habitat](https://habitat.sh) supervisor and loads configured services. This provisioner only supports Linux targets using the `ssh` connection type at this time. -!> **Note:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. For more information, see [the main Provisioners page](/language/resources/provisioners). +!> **Warning:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. 
## Requirements diff --git a/website/docs/language/resources/provisioners/index.mdx b/website/docs/language/resources/provisioners/index.mdx deleted file mode 100644 index badd56b09..000000000 --- a/website/docs/language/resources/provisioners/index.mdx +++ /dev/null @@ -1,12 +0,0 @@ ---- -page_title: Provisioners Overview - Configuration Language -description: >- - Provisioners model specific actions on a local or remote machine to prepare - servers or other infrastructure for service. ---- - -# Provisioners - -Provisioners can be used to model specific actions on the local machine or on a -remote machine in order to prepare servers or other infrastructure objects for -service. diff --git a/website/docs/language/resources/provisioners/local-exec.mdx b/website/docs/language/resources/provisioners/local-exec.mdx index 02eec0ef7..8298cfbe7 100644 --- a/website/docs/language/resources/provisioners/local-exec.mdx +++ b/website/docs/language/resources/provisioners/local-exec.mdx @@ -19,8 +19,8 @@ run, there is no guarantee that it will be in an operable state - for example system services such as `sshd` may not be started yet on compute resources. -> **Note:** Provisioners should only be used as a last resort. For most -common situations there are better alternatives. For more information, see -[the main Provisioners page](/language/resources/provisioners). +common situations there are better alternatives. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. ## Example usage diff --git a/website/docs/language/resources/provisioners/null_resource.mdx b/website/docs/language/resources/provisioners/null_resource.mdx index 3e7b3fd65..0ba1294a9 100644 --- a/website/docs/language/resources/provisioners/null_resource.mdx +++ b/website/docs/language/resources/provisioners/null_resource.mdx @@ -19,6 +19,10 @@ details](/language/resources/provisioners/connection) on a `null_resource`. 
You use its `triggers` argument and any meta-arguments to control exactly where in the dependency graph its provisioners will run. +-> **Note:** Provisioners should only be used as a last resort. For most +common situations there are better alternatives. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. + ## Example usage ```hcl diff --git a/website/docs/language/resources/provisioners/puppet.mdx b/website/docs/language/resources/provisioners/puppet.mdx index e0894c271..5563a369e 100644 --- a/website/docs/language/resources/provisioners/puppet.mdx +++ b/website/docs/language/resources/provisioners/puppet.mdx @@ -11,7 +11,8 @@ The `puppet` provisioner installs, configures and runs the Puppet agent on a remote resource. The `puppet` provisioner supports both `ssh` and `winrm` type [connections](/language/resources/provisioners/connection). -!> **Note:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. For more information, see [the main Provisioners page](/language/resources/provisioners). +!> **Warning:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. ## Requirements diff --git a/website/docs/language/resources/provisioners/remote-exec.mdx b/website/docs/language/resources/provisioners/remote-exec.mdx index 4b4d94274..89ce5c109 100644 --- a/website/docs/language/resources/provisioners/remote-exec.mdx +++ b/website/docs/language/resources/provisioners/remote-exec.mdx @@ -18,8 +18,8 @@ provisioner requires a [connection](/language/resources/provisioners/connection) and supports both `ssh` and `winrm`. 
-> **Note:** Provisioners should only be used as a last resort. For most -common situations there are better alternatives. For more information, see -[the main Provisioners page](/language/resources/provisioners). +common situations there are better alternatives. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. ## Example usage @@ -27,7 +27,7 @@ common situations there are better alternatives. For more information, see resource "aws_instance" "web" { # ... - # Establishes connection to be used by all + # Establishes connection to be used by all # generic remote provisioners (i.e. file/remote-exec) connection { type = "ssh" diff --git a/website/docs/language/resources/provisioners/salt-masterless.mdx b/website/docs/language/resources/provisioners/salt-masterless.mdx index 70fd17dbe..45aade11f 100644 --- a/website/docs/language/resources/provisioners/salt-masterless.mdx +++ b/website/docs/language/resources/provisioners/salt-masterless.mdx @@ -12,7 +12,8 @@ Type: `salt-masterless` The `salt-masterless` Terraform provisioner provisions machines built by Terraform using [Salt](http://saltstack.com/) states, without connecting to a Salt master. The `salt-masterless` provisioner supports `ssh` [connections](/language/resources/provisioners/connection). -!> **Note:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. For more information, see [the main Provisioners page](/language/resources/provisioners). +!> **Warning:** This provisioner was removed in the 0.15.0 version of Terraform after being deprecated as of Terraform 0.13.4. For most common situations there are better alternatives to using provisioners. Refer to +[Declaring Provisioners](/language/resources/provisioners/syntax) for more details. 
## Requirements From bfefb7405eefdbc8f74e82669148eb1379c0ec73 Mon Sep 17 00:00:00 2001 From: Laura Pacilio <83350965+laurapacilio@users.noreply.github.com> Date: Wed, 5 Jan 2022 11:09:43 -0500 Subject: [PATCH 54/68] Update layout file to remove deleted page --- website/layouts/language.erb | 4 ---- 1 file changed, 4 deletions(-) diff --git a/website/layouts/language.erb b/website/layouts/language.erb index b2233530f..ee9d01774 100644 --- a/website/layouts/language.erb +++ b/website/layouts/language.erb @@ -85,10 +85,6 @@
  • Provisioners