Merge branch 'v0.10.7-branch' into stable-website
commit e34eb38672
|
@ -27,6 +27,7 @@ install:
|
|||
script:
|
||||
- make vendor-status
|
||||
- make test
|
||||
- make e2etest
|
||||
- make vet
|
||||
- GOOS=windows go build
|
||||
branches:
|
||||
|
|
20
CHANGELOG.md
|
@ -1,3 +1,22 @@
|
|||
## 0.10.7 (October 2, 2017)
|
||||
|
||||
NEW FEATURES:
|
||||
|
||||
* Provider plugins can now optionally be cached in a shared directory to avoid re-downloading them for each configuration working directory. For more information, see [the documentation](https://github.com/hashicorp/terraform/blob/34956cd12449cb77db3f55e3286cd369e8332eeb/website/docs/configuration/providers.html.md#provider-plugin-cache). ([#16000](https://github.com/hashicorp/terraform/issues/16000))
|
||||
|
||||
IMPROVEMENTS:
|
||||
|
||||
* config: New `abs` interpolation function, returning the absolute value of a number ([#16168](https://github.com/hashicorp/terraform/issues/16168))
|
||||
* config: New `transpose` interpolation function, which swaps the keys and values in a map of lists of strings. ([#16192](https://github.com/hashicorp/terraform/issues/16192))
|
||||
* cli: The Terraform CLI now supports tab-completion for commands and certain arguments for `bash` and `zsh` users. See [the tab-completion docs](https://github.com/hashicorp/terraform/blob/2c782e60fad78e6fc976d850162322608f074e57/website/docs/commands/index.html.markdown#shell-tab-completion) for information on how to enable it. ([#16176](https://github.com/hashicorp/terraform/issues/16176))
|
||||
* cli: `terraform state rm` now includes in its output the count of resources that were removed from the state. ([#16137](https://github.com/hashicorp/terraform/issues/16137))
|
||||
|
||||
BUG FIXES:
|
||||
|
||||
* modules: Update go-getter to fix crash when fetching invalid source subdir ([#16161](https://github.com/hashicorp/terraform/issues/16161))
|
||||
* modules: Fix regression in the handling of modules sourcing other modules with relative paths ([#16160](https://github.com/hashicorp/terraform/issues/16160))
|
||||
* core: Skip local value interpolation during destroy ([#16213](https://github.com/hashicorp/terraform/issues/16213))
|
||||
|
||||
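For illustration only (not part of the original changelog), here is a minimal sketch of how the new `abs` and `transpose` interpolation functions listed above might be used. The output names and the `var.groups` variable are hypothetical; the expected results follow the test cases added to `config/interpolate_funcs_test.go` in this commit.

```hcl
output "absolute" {
  # abs returns the absolute value of a number.
  value = "${abs(-3.14)}" # => "3.14"
}

output "transposed" {
  # Assumes var.groups is a map of lists of strings defined elsewhere,
  # e.g. { key1 = ["a", "b"], key2 = ["a", "b", "c"] }.
  # transpose swaps keys and values:
  # => { a = ["key1", "key2"], b = ["key1", "key2"], c = ["key2"] }
  value = "${transpose(var.groups)}"
}
```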
## 0.10.6 (September 19, 2017)
|
||||
|
||||
UPGRADE NOTES:
|
||||
|
@ -9,6 +28,7 @@ UPGRADE NOTES:
|
|||
|
||||
IMPROVEMENTS:
|
||||
|
||||
* Modules can now be installed from [the Terraform Registry](https://registry.terraform.io/)
|
||||
* cli: `terraform import` now accepts an option `-allow-missing-config` that overrides the default requirement that a configuration block must already be present for the resource being imported. ([#15876](https://github.com/hashicorp/terraform/issues/15876))
|
||||
|
||||
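As a brief usage sketch for the registry module support noted above: modules are addressed by `namespace/name/provider`, as in the `registry-load` test fixture added later in this commit.

```hcl
module "vault" {
  source = "hashicorp/vault/aws"
}
```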
## 0.10.5 (September 14, 2017)
|
||||
|
|
8
Makefile
|
@ -42,6 +42,12 @@ testacc: fmtcheck generate
|
|||
fi
|
||||
TF_ACC=1 go test $(TEST) -v $(TESTARGS) -timeout 120m
|
||||
|
||||
# e2etest runs the end-to-end tests against a generated Terraform binary
|
||||
# The TF_ACC here allows network access, but does not require any special
|
||||
# credentials since the e2etests use local-only providers such as "null".
|
||||
e2etest: generate
|
||||
TF_ACC=1 go test -v ./command/e2etest
|
||||
|
||||
test-compile: fmtcheck generate
|
||||
@if [ "$(TEST)" = "./..." ]; then \
|
||||
echo "ERROR: Set TEST to a specific package. For example,"; \
|
||||
|
@ -96,4 +102,4 @@ vendor-status:
|
|||
# under parallel conditions.
|
||||
.NOTPARALLEL:
|
||||
|
||||
.PHONY: bin cover default dev fmt fmtcheck generate plugin-dev quickdev test-compile test testacc testrace tools vendor-status vet
|
||||
.PHONY: bin cover default dev e2etest fmt fmtcheck generate plugin-dev quickdev test-compile test testacc testrace tools vendor-status vet
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
package command
|
||||
|
||||
import (
|
||||
"github.com/posener/complete"
|
||||
"github.com/posener/complete/match"
|
||||
)
|
||||
|
||||
// This file contains some re-usable predictors for auto-complete. The
|
||||
// command-specific autocomplete configurations live within each command's
|
||||
// own source file, as AutocompleteArgs and AutocompleteFlags methods on each
|
||||
// Command implementation.
|
||||
|
||||
// For completing the value of boolean flags like -foo false
|
||||
var completePredictBoolean = complete.PredictSet("true", "false")
|
||||
|
||||
// We don't currently have a real predictor for module sources, but
|
||||
// we'll probably add one later.
|
||||
var completePredictModuleSource = complete.PredictAnything
|
||||
|
||||
type completePredictSequence []complete.Predictor
|
||||
|
||||
func (s completePredictSequence) Predict(a complete.Args) []string {
|
||||
// Only one level of command is stripped off the prefix of a.Completed
|
||||
// here, so nested subcommands like "workspace new" will need to provide
|
||||
// dummy entries (e.g. complete.PredictNothing) as placeholders for
|
||||
// all but the first subcommand. For example, "workspace new" needs
|
||||
// one placeholder for the argument "new".
|
||||
idx := len(a.Completed)
|
||||
if idx >= len(s) {
|
||||
return nil
|
||||
}
|
||||
|
||||
return s[idx].Predict(a)
|
||||
}
|
||||
|
||||
func (m *Meta) completePredictWorkspaceName() complete.Predictor {
|
||||
return complete.PredictFunc(func(a complete.Args) []string {
|
||||
// There are a lot of things that can fail in here, so if we encounter
|
||||
// any error then we'll just return nothing and not support autocomplete
|
||||
// until whatever error is fixed. (The user can't actually see the error
|
||||
// here, but other commands should produce a user-visible error before
|
||||
// too long.)
|
||||
|
||||
// We assume here that we want to autocomplete for the current working
|
||||
// directory, since we don't have enough context to know where to
|
||||
// find any config path argument, and it might be _after_ the argument
|
||||
// we're trying to complete here anyway.
|
||||
configPath, err := ModulePath(nil)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
cfg, err := m.Config(configPath)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
b, err := m.Backend(&BackendOpts{
|
||||
Config: cfg,
|
||||
})
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
names, _ := b.States()
|
||||
|
||||
if a.Last != "" {
|
||||
// filter for names that match the prefix only
|
||||
filtered := make([]string, 0, len(names))
|
||||
for _, name := range names {
|
||||
if match.Prefix(name, a.Last) {
|
||||
filtered = append(filtered, name)
|
||||
}
|
||||
}
|
||||
names = filtered
|
||||
}
|
||||
return names
|
||||
})
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
package command
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/mitchellh/cli"
|
||||
"github.com/posener/complete"
|
||||
)
|
||||
|
||||
func TestMetaCompletePredictWorkspaceName(t *testing.T) {
|
||||
// Create a temporary working directory that is empty
|
||||
td := tempDir(t)
|
||||
os.MkdirAll(td, 0755)
|
||||
defer os.RemoveAll(td)
|
||||
defer testChdir(t, td)()
|
||||
|
||||
// make sure a vars file doesn't interfere
|
||||
err := ioutil.WriteFile(DefaultVarsFilename, nil, 0644)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
ui := new(cli.MockUi)
|
||||
meta := &Meta{Ui: ui}
|
||||
|
||||
predictor := meta.completePredictWorkspaceName()
|
||||
|
||||
t.Run("no prefix", func(t *testing.T) {
|
||||
got := predictor.Predict(complete.Args{
|
||||
Last: "",
|
||||
})
|
||||
want := []string{"default"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("prefix that matches", func(t *testing.T) {
|
||||
got := predictor.Predict(complete.Args{
|
||||
Last: "def",
|
||||
})
|
||||
want := []string{"default"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("prefix that doesn't match", func(t *testing.T) {
|
||||
got := predictor.Predict(complete.Args{
|
||||
Last: "x",
|
||||
})
|
||||
want := []string{}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("wrong result\ngot: %#v\nwant: %#v", got, want)
|
||||
}
|
||||
})
|
||||
}
|
|
@ -0,0 +1,237 @@
|
|||
package e2etest
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"github.com/hashicorp/terraform/e2e"
|
||||
)
|
||||
|
||||
// The tests in this file run through different scenarios recommended in our
|
||||
// "Running Terraform in Automation" guide:
|
||||
// https://www.terraform.io/guides/running-terraform-in-automation.html
|
||||
|
||||
// TestPlanApplyInAutomation runs through the "main case" of init, plan, apply
|
||||
// using the specific command line options suggested in the guide.
|
||||
func TestPlanApplyInAutomation(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// This test reaches out to releases.hashicorp.com to download the
|
||||
// template and null providers, so it can only run if network access is
|
||||
// allowed.
|
||||
skipIfCannotAccessNetwork(t)
|
||||
|
||||
fixturePath := filepath.Join("test-fixtures", "full-workflow-null")
|
||||
tf := e2e.NewBinary(terraformBin, fixturePath)
|
||||
defer tf.Close()
|
||||
|
||||
// We advertise that _any_ non-empty value works, so we'll test something
|
||||
// unconventional here.
|
||||
tf.AddEnv("TF_IN_AUTOMATION=yes-please")
|
||||
|
||||
//// INIT
|
||||
stdout, stderr, err := tf.Run("init", "-input=false")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected init error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
// Make sure we actually downloaded the plugins, rather than picking up
|
||||
// copies that might be already installed globally on the system.
|
||||
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") {
|
||||
t.Errorf("template provider download message is missing from init output:\n%s", stdout)
|
||||
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
|
||||
}
|
||||
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") {
|
||||
t.Errorf("null provider download message is missing from init output:\n%s", stdout)
|
||||
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
|
||||
}
|
||||
|
||||
//// PLAN
|
||||
stdout, stderr, err = tf.Run("plan", "-out=tfplan", "-input=false")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected plan error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
if !strings.Contains(stdout, "1 to add, 0 to change, 0 to destroy") {
|
||||
t.Errorf("incorrect plan tally; want 1 to add:\n%s", stdout)
|
||||
}
|
||||
|
||||
// Because we're running with TF_IN_AUTOMATION set, we should not see
|
||||
// any mention of the plan file in the output.
|
||||
if strings.Contains(stdout, "tfplan") {
|
||||
t.Errorf("unwanted mention of \"tfplan\" file in plan output\n%s", stdout)
|
||||
}
|
||||
|
||||
plan, err := tf.Plan("tfplan")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read plan file: %s", err)
|
||||
}
|
||||
|
||||
stateResources := plan.State.RootModule().Resources
|
||||
diffResources := plan.Diff.RootModule().Resources
|
||||
|
||||
if len(stateResources) != 1 || stateResources["data.template_file.test"] == nil {
|
||||
t.Errorf("incorrect state in plan; want just data.template_file.test to have been rendered, but have:\n%s", spew.Sdump(stateResources))
|
||||
}
|
||||
if len(diffResources) != 1 || diffResources["null_resource.test"] == nil {
|
||||
t.Errorf("incorrect diff in plan; want just null_resource.test to have been rendered, but have:\n%s", spew.Sdump(diffResources))
|
||||
}
|
||||
|
||||
//// APPLY
|
||||
stdout, stderr, err = tf.Run("apply", "-input=false", "tfplan")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected apply error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
if !strings.Contains(stdout, "Resources: 1 added, 0 changed, 0 destroyed") {
|
||||
t.Errorf("incorrect apply tally; want 1 added:\n%s", stdout)
|
||||
}
|
||||
|
||||
state, err := tf.LocalState()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read state file: %s", err)
|
||||
}
|
||||
|
||||
stateResources = state.RootModule().Resources
|
||||
var gotResources []string
|
||||
for n := range stateResources {
|
||||
gotResources = append(gotResources, n)
|
||||
}
|
||||
sort.Strings(gotResources)
|
||||
|
||||
wantResources := []string{
|
||||
"data.template_file.test",
|
||||
"null_resource.test",
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(gotResources, wantResources) {
|
||||
t.Errorf("wrong resources in state\ngot: %#v\nwant: %#v", gotResources, wantResources)
|
||||
}
|
||||
}
|
||||
|
||||
// TestAutoApplyInAutomation tests the scenario where the caller skips creating
|
||||
// an explicit plan and instead forces automatic application of changes.
|
||||
func TestAutoApplyInAutomation(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// This test reaches out to releases.hashicorp.com to download the
|
||||
// template and null providers, so it can only run if network access is
|
||||
// allowed.
|
||||
skipIfCannotAccessNetwork(t)
|
||||
|
||||
fixturePath := filepath.Join("test-fixtures", "full-workflow-null")
|
||||
tf := e2e.NewBinary(terraformBin, fixturePath)
|
||||
defer tf.Close()
|
||||
|
||||
// We advertise that _any_ non-empty value works, so we'll test something
|
||||
// unconventional here.
|
||||
tf.AddEnv("TF_IN_AUTOMATION=very-much-so")
|
||||
|
||||
//// INIT
|
||||
stdout, stderr, err := tf.Run("init", "-input=false")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected init error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
// Make sure we actually downloaded the plugins, rather than picking up
|
||||
// copies that might be already installed globally on the system.
|
||||
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") {
|
||||
t.Errorf("template provider download message is missing from init output:\n%s", stdout)
|
||||
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
|
||||
}
|
||||
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") {
|
||||
t.Errorf("null provider download message is missing from init output:\n%s", stdout)
|
||||
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
|
||||
}
|
||||
|
||||
//// APPLY
|
||||
stdout, stderr, err = tf.Run("apply", "-input=false", "-auto-approve=true")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected apply error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
if !strings.Contains(stdout, "Resources: 1 added, 0 changed, 0 destroyed") {
|
||||
t.Errorf("incorrect apply tally; want 1 added:\n%s", stdout)
|
||||
}
|
||||
|
||||
state, err := tf.LocalState()
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read state file: %s", err)
|
||||
}
|
||||
|
||||
stateResources := state.RootModule().Resources
|
||||
var gotResources []string
|
||||
for n := range stateResources {
|
||||
gotResources = append(gotResources, n)
|
||||
}
|
||||
sort.Strings(gotResources)
|
||||
|
||||
wantResources := []string{
|
||||
"data.template_file.test",
|
||||
"null_resource.test",
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(gotResources, wantResources) {
|
||||
t.Errorf("wrong resources in state\ngot: %#v\nwant: %#v", gotResources, wantResources)
|
||||
}
|
||||
}
|
||||
|
||||
// TestPlanOnlyInAutomation tests the scenario of creating a "throwaway" plan,
|
||||
// which we recommend as a way to verify a pull request.
|
||||
func TestPlanOnlyInAutomation(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// This test reaches out to releases.hashicorp.com to download the
|
||||
// template and null providers, so it can only run if network access is
|
||||
// allowed.
|
||||
skipIfCannotAccessNetwork(t)
|
||||
|
||||
fixturePath := filepath.Join("test-fixtures", "full-workflow-null")
|
||||
tf := e2e.NewBinary(terraformBin, fixturePath)
|
||||
defer tf.Close()
|
||||
|
||||
// We advertise that _any_ non-empty value works, so we'll test something
|
||||
// unconventional here.
|
||||
tf.AddEnv("TF_IN_AUTOMATION=verily")
|
||||
|
||||
//// INIT
|
||||
stdout, stderr, err := tf.Run("init", "-input=false")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected init error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
// Make sure we actually downloaded the plugins, rather than picking up
|
||||
// copies that might be already installed globally on the system.
|
||||
if !strings.Contains(stdout, "- Downloading plugin for provider \"template\"") {
|
||||
t.Errorf("template provider download message is missing from init output:\n%s", stdout)
|
||||
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
|
||||
}
|
||||
if !strings.Contains(stdout, "- Downloading plugin for provider \"null\"") {
|
||||
t.Errorf("null provider download message is missing from init output:\n%s", stdout)
|
||||
t.Logf("(this can happen if you have a copy of the plugin in one of the global plugin search dirs)")
|
||||
}
|
||||
|
||||
//// PLAN
|
||||
stdout, stderr, err = tf.Run("plan", "-input=false")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected plan error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
if !strings.Contains(stdout, "1 to add, 0 to change, 0 to destroy") {
|
||||
t.Errorf("incorrect plan tally; want 1 to add:\n%s", stdout)
|
||||
}
|
||||
|
||||
// Because we're running with TF_IN_AUTOMATION set, we should not see
|
||||
// any mention of the "terraform apply" command in the output.
|
||||
if strings.Contains(stdout, "terraform apply") {
|
||||
t.Errorf("unwanted mention of \"terraform apply\" in plan output\n%s", stdout)
|
||||
}
|
||||
|
||||
if tf.FileExists("tfplan") {
|
||||
t.Error("plan file was created, but was not expected")
|
||||
}
|
||||
}
|
|
@ -1,7 +1,11 @@
|
|||
package e2etest
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
|
@ -45,3 +49,55 @@ func TestInitProviders(t *testing.T) {
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
func TestInitProviders_pluginCache(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// This test reaches out to releases.hashicorp.com to access plugin
|
||||
// metadata, and download the null plugin, though the template plugin
|
||||
// should come from local cache.
|
||||
skipIfCannotAccessNetwork(t)
|
||||
|
||||
fixturePath := filepath.Join("test-fixtures", "plugin-cache")
|
||||
tf := e2e.NewBinary(terraformBin, fixturePath)
|
||||
defer tf.Close()
|
||||
|
||||
// Our fixture dir has a generic os_arch dir, which we need to customize
|
||||
// to the actual OS/arch where this test is running in order to get the
|
||||
// desired result.
|
||||
fixtMachineDir := tf.Path("cache/os_arch")
|
||||
wantMachineDir := tf.Path("cache", fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH))
|
||||
os.Rename(fixtMachineDir, wantMachineDir)
|
||||
|
||||
cmd := tf.Cmd("init")
|
||||
cmd.Env = append(cmd.Env, "TF_PLUGIN_CACHE_DIR=./cache")
|
||||
cmd.Stdin = nil
|
||||
cmd.Stderr = &bytes.Buffer{}
|
||||
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
stderr := cmd.Stderr.(*bytes.Buffer).String()
|
||||
if stderr != "" {
|
||||
t.Errorf("unexpected stderr output:\n%s", stderr)
|
||||
}
|
||||
|
||||
path := fmt.Sprintf(".terraform/plugins/%s_%s/terraform-provider-template_v0.1.0_x4", runtime.GOOS, runtime.GOARCH)
|
||||
content, err := tf.ReadFile(path)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read installed plugin from %s: %s", path, err)
|
||||
}
|
||||
if strings.TrimSpace(string(content)) != "this is not a real plugin" {
|
||||
t.Errorf("template plugin was not installed from local cache")
|
||||
}
|
||||
|
||||
if !tf.FileExists(fmt.Sprintf(".terraform/plugins/%s_%s/terraform-provider-null_v0.1.0_x4", runtime.GOOS, runtime.GOARCH)) {
|
||||
t.Errorf("null plugin was not installed")
|
||||
}
|
||||
|
||||
if !tf.FileExists(fmt.Sprintf("cache/%s_%s/terraform-provider-null_v0.1.0_x4", runtime.GOOS, runtime.GOARCH)) {
|
||||
t.Errorf("null plugin is not in cache after install")
|
||||
}
|
||||
}
|
||||
|
|
|
@ -57,6 +57,13 @@ func TestPrimarySeparatePlan(t *testing.T) {
|
|||
t.Errorf("incorrect plan tally; want 1 to add:\n%s", stdout)
|
||||
}
|
||||
|
||||
if !strings.Contains(stdout, "This plan was saved to: tfplan") {
|
||||
t.Errorf("missing \"This plan was saved to...\" message in plan output\n%s", stdout)
|
||||
}
|
||||
if !strings.Contains(stdout, "terraform apply \"tfplan\"") {
|
||||
t.Errorf("missing next-step instruction in plan output\n%s", stdout)
|
||||
}
|
||||
|
||||
plan, err := tf.Plan("tfplan")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read plan file: %s", err)
|
||||
|
@ -109,8 +116,8 @@ func TestPrimarySeparatePlan(t *testing.T) {
|
|||
t.Fatalf("unexpected destroy error: %s\nstderr:\n%s", err, stderr)
|
||||
}
|
||||
|
||||
if !strings.Contains(stdout, "Resources: 2 destroyed") {
|
||||
t.Errorf("incorrect destroy tally; want 2 destroyed:\n%s", stdout)
|
||||
if !strings.Contains(stdout, "Resources: 1 destroyed") {
|
||||
t.Errorf("incorrect destroy tally; want 1 destroyed:\n%s", stdout)
|
||||
}
|
||||
|
||||
state, err = tf.LocalState()
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
this is not a real plugin
|
|
@ -0,0 +1,7 @@
|
|||
provider "template" {
|
||||
version = "0.1.0"
|
||||
}
|
||||
|
||||
provider "null" {
|
||||
version = "0.1.0"
|
||||
}
|
|
@ -7,6 +7,8 @@ import (
|
|||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/posener/complete"
|
||||
|
||||
"github.com/hashicorp/go-getter"
|
||||
|
||||
multierror "github.com/hashicorp/go-multierror"
|
||||
|
@ -69,10 +71,11 @@ func (c *InitCommand) Run(args []string) int {
|
|||
c.getPlugins = false
|
||||
}
|
||||
|
||||
// set getProvider if we don't have a test version already
|
||||
// set providerInstaller if we don't have a test version already
|
||||
if c.providerInstaller == nil {
|
||||
c.providerInstaller = &discovery.ProviderInstaller{
|
||||
Dir: c.pluginDir(),
|
||||
Dir: c.pluginDir(),
|
||||
Cache: c.pluginCache(),
|
||||
PluginProtocolVersion: plugin.Handshake.ProtocolVersion,
|
||||
SkipVerify: !flagVerifyPlugins,
|
||||
Ui: c.Ui,
|
||||
|
@ -452,6 +455,29 @@ func (c *InitCommand) getProviders(path string, state *terraform.State, upgrade
|
|||
return nil
|
||||
}
|
||||
|
||||
func (c *InitCommand) AutocompleteArgs() complete.Predictor {
|
||||
return complete.PredictDirs("")
|
||||
}
|
||||
|
||||
func (c *InitCommand) AutocompleteFlags() complete.Flags {
|
||||
return complete.Flags{
|
||||
"-backend": completePredictBoolean,
|
||||
"-backend-config": complete.PredictFiles("*.tfvars"), // can also be key=value, but we can't "predict" that
|
||||
"-force-copy": complete.PredictNothing,
|
||||
"-from-module": completePredictModuleSource,
|
||||
"-get": completePredictBoolean,
|
||||
"-get-plugins": completePredictBoolean,
|
||||
"-input": completePredictBoolean,
|
||||
"-lock": completePredictBoolean,
|
||||
"-lock-timeout": complete.PredictAnything,
|
||||
"-no-color": complete.PredictNothing,
|
||||
"-plugin-dir": complete.PredictDirs(""),
|
||||
"-reconfigure": complete.PredictNothing,
|
||||
"-upgrade": completePredictBoolean,
|
||||
"-verify-plugins": completePredictBoolean,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *InitCommand) Help() string {
|
||||
helpText := `
|
||||
Usage: terraform init [options] [DIR]
|
||||
|
|
|
@ -55,6 +55,10 @@ type Meta struct {
|
|||
// the specific commands being run.
|
||||
RunningInAutomation bool
|
||||
|
||||
// PluginCacheDir, if non-empty, enables caching of downloaded plugins
|
||||
// into the given directory.
|
||||
PluginCacheDir string
|
||||
|
||||
//----------------------------------------------------------
|
||||
// Protected: commands can set these
|
||||
//----------------------------------------------------------
|
||||
|
|
|
@ -168,6 +168,17 @@ func (m *Meta) pluginDirs(includeAutoInstalled bool) []string {
|
|||
return dirs
|
||||
}
|
||||
|
||||
func (m *Meta) pluginCache() discovery.PluginCache {
|
||||
dir := m.PluginCacheDir
|
||||
if dir == "" {
|
||||
return nil // cache disabled
|
||||
}
|
||||
|
||||
dir = filepath.Join(dir, pluginMachineName)
|
||||
|
||||
return discovery.NewLocalPluginCache(dir)
|
||||
}
|
||||
|
||||
// providerPluginSet returns the set of valid providers that were discovered in
|
||||
// the defined search paths.
|
||||
func (m *Meta) providerPluginSet() discovery.PluginMetaSet {
|
||||
|
|
|
@ -52,6 +52,8 @@ func (c *StateRmCommand) Run(args []string) int {
|
|||
return 1
|
||||
}
|
||||
|
||||
c.Ui.Output(fmt.Sprintf("%d items removed.", len(args)))
|
||||
|
||||
if err := state.WriteState(stateReal); err != nil {
|
||||
c.Ui.Error(fmt.Sprintf(errStateRmPersist, err))
|
||||
return 1
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"github.com/hashicorp/terraform/command/clistate"
|
||||
"github.com/hashicorp/terraform/state"
|
||||
"github.com/mitchellh/cli"
|
||||
"github.com/posener/complete"
|
||||
)
|
||||
|
||||
type WorkspaceDeleteCommand struct {
|
||||
|
@ -156,6 +157,21 @@ func (c *WorkspaceDeleteCommand) Run(args []string) int {
|
|||
|
||||
return 0
|
||||
}
|
||||
|
||||
func (c *WorkspaceDeleteCommand) AutocompleteArgs() complete.Predictor {
|
||||
return completePredictSequence{
|
||||
complete.PredictNothing, // the "select" subcommand itself (already matched)
|
||||
c.completePredictWorkspaceName(),
|
||||
complete.PredictDirs(""),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *WorkspaceDeleteCommand) AutocompleteFlags() complete.Flags {
|
||||
return complete.Flags{
|
||||
"-force": complete.PredictNothing,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *WorkspaceDeleteCommand) Help() string {
|
||||
helpText := `
|
||||
Usage: terraform workspace delete [OPTIONS] NAME [DIR]
|
||||
|
|
|
@ -4,6 +4,8 @@ import (
|
|||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/posener/complete"
|
||||
)
|
||||
|
||||
type WorkspaceListCommand struct {
|
||||
|
@ -75,6 +77,14 @@ func (c *WorkspaceListCommand) Run(args []string) int {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (c *WorkspaceListCommand) AutocompleteArgs() complete.Predictor {
|
||||
return complete.PredictDirs("")
|
||||
}
|
||||
|
||||
func (c *WorkspaceListCommand) AutocompleteFlags() complete.Flags {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *WorkspaceListCommand) Help() string {
|
||||
helpText := `
|
||||
Usage: terraform workspace list [DIR]
|
||||
|
|
|
@ -10,6 +10,7 @@ import (
|
|||
"github.com/hashicorp/terraform/state"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
"github.com/mitchellh/cli"
|
||||
"github.com/posener/complete"
|
||||
)
|
||||
|
||||
type WorkspaceNewCommand struct {
|
||||
|
@ -156,6 +157,20 @@ func (c *WorkspaceNewCommand) Run(args []string) int {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (c *WorkspaceNewCommand) AutocompleteArgs() complete.Predictor {
|
||||
return completePredictSequence{
|
||||
complete.PredictNothing, // the "new" subcommand itself (already matched)
|
||||
complete.PredictAnything,
|
||||
complete.PredictDirs(""),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *WorkspaceNewCommand) AutocompleteFlags() complete.Flags {
|
||||
return complete.Flags{
|
||||
"-state": complete.PredictFiles("*.tfstate"),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *WorkspaceNewCommand) Help() string {
|
||||
helpText := `
|
||||
Usage: terraform workspace new [OPTIONS] NAME [DIR]
|
||||
|
|
|
@ -5,6 +5,7 @@ import (
|
|||
"strings"
|
||||
|
||||
"github.com/mitchellh/cli"
|
||||
"github.com/posener/complete"
|
||||
)
|
||||
|
||||
type WorkspaceSelectCommand struct {
|
||||
|
@ -103,6 +104,18 @@ func (c *WorkspaceSelectCommand) Run(args []string) int {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (c *WorkspaceSelectCommand) AutocompleteArgs() complete.Predictor {
|
||||
return completePredictSequence{
|
||||
complete.PredictNothing, // the "select" subcommand itself (already matched)
|
||||
c.completePredictWorkspaceName(),
|
||||
complete.PredictDirs(""),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *WorkspaceSelectCommand) AutocompleteFlags() complete.Flags {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *WorkspaceSelectCommand) Help() string {
|
||||
helpText := `
|
||||
Usage: terraform workspace select NAME [DIR]
|
||||
|
|
|
@ -2,6 +2,8 @@ package command
|
|||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/posener/complete"
|
||||
)
|
||||
|
||||
type WorkspaceShowCommand struct {
|
||||
|
@ -26,6 +28,14 @@ func (c *WorkspaceShowCommand) Run(args []string) int {
|
|||
return 0
|
||||
}
|
||||
|
||||
func (c *WorkspaceShowCommand) AutocompleteArgs() complete.Predictor {
|
||||
return complete.PredictNothing
|
||||
}
|
||||
|
||||
func (c *WorkspaceShowCommand) AutocompleteFlags() complete.Flags {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *WorkspaceShowCommand) Help() string {
|
||||
helpText := `
|
||||
Usage: terraform workspace show
|
||||
|
|
11
commands.go
|
@ -25,15 +25,7 @@ const (
|
|||
OutputPrefix = "o:"
|
||||
)
|
||||
|
||||
func init() {
|
||||
Ui = &cli.PrefixedUi{
|
||||
AskPrefix: OutputPrefix,
|
||||
OutputPrefix: OutputPrefix,
|
||||
InfoPrefix: OutputPrefix,
|
||||
ErrorPrefix: ErrorPrefix,
|
||||
Ui: &cli.BasicUi{Writer: os.Stdout},
|
||||
}
|
||||
|
||||
func initCommands(config *Config) {
|
||||
var inAutomation bool
|
||||
if v := os.Getenv(runningInAutomationEnvName); v != "" {
|
||||
inAutomation = true
|
||||
|
@ -46,6 +38,7 @@ func init() {
|
|||
Ui: Ui,
|
||||
|
||||
RunningInAutomation: inAutomation,
|
||||
PluginCacheDir: config.PluginCacheDir,
|
||||
}
|
||||
|
||||
// The command list is included in the terraform -help
|
||||
|
|
33
config.go
|
@ -11,6 +11,8 @@ import (
|
|||
"github.com/hashicorp/terraform/command"
|
||||
)
|
||||
|
||||
const pluginCacheDirEnvVar = "TF_PLUGIN_CACHE_DIR"
|
||||
|
||||
// Config is the structure of the configuration for the Terraform CLI.
|
||||
//
|
||||
// This is not the configuration for Terraform itself. That is in the
|
||||
|
@ -21,6 +23,10 @@ type Config struct {
|
|||
|
||||
DisableCheckpoint bool `hcl:"disable_checkpoint"`
|
||||
DisableCheckpointSignature bool `hcl:"disable_checkpoint_signature"`
|
||||
|
||||
// If set, enables local caching of plugins in this directory to
|
||||
// avoid repeatedly re-downloading over the Internet.
|
||||
PluginCacheDir string `hcl:"plugin_cache_dir"`
|
||||
}
|
||||
|
||||
// BuiltinConfig is the built-in defaults for the configuration. These
|
||||
|
@ -75,9 +81,31 @@ func LoadConfig(path string) (*Config, error) {
|
|||
result.Provisioners[k] = os.ExpandEnv(v)
|
||||
}
|
||||
|
||||
if result.PluginCacheDir != "" {
|
||||
result.PluginCacheDir = os.ExpandEnv(result.PluginCacheDir)
|
||||
}
|
||||
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
// EnvConfig returns a Config populated from environment variables.
|
||||
//
|
||||
// Any values specified in this config should override those set in the
|
||||
// configuration file.
|
||||
func EnvConfig() *Config {
|
||||
config := &Config{}
|
||||
|
||||
if envPluginCacheDir := os.Getenv(pluginCacheDirEnvVar); envPluginCacheDir != "" {
|
||||
// No ExpandEnv here, because expanding environment variables inside
|
||||
// an environment variable would be strange and seems unnecessary.
|
||||
// (User can expand variables into the value while setting it using
|
||||
// standard shell features.)
|
||||
config.PluginCacheDir = envPluginCacheDir
|
||||
}
|
||||
|
||||
return config
|
||||
}
|
||||
|
||||
// Merge merges two configurations and returns a third entirely
|
||||
// new configuration with the two merged.
|
||||
func (c1 *Config) Merge(c2 *Config) *Config {
|
||||
|
@ -105,5 +133,10 @@ func (c1 *Config) Merge(c2 *Config) *Config {
|
|||
result.DisableCheckpoint = c1.DisableCheckpoint || c2.DisableCheckpoint
|
||||
result.DisableCheckpointSignature = c1.DisableCheckpointSignature || c2.DisableCheckpointSignature
|
||||
|
||||
result.PluginCacheDir = c1.PluginCacheDir
|
||||
if result.PluginCacheDir == "" {
|
||||
result.PluginCacheDir = c2.PluginCacheDir
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
||||
|
|
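As a usage sketch (not part of the commit itself), the `plugin_cache_dir` setting defined above would typically be set in the CLI configuration file (commonly `~/.terraformrc` on Unix systems), or via the `TF_PLUGIN_CACHE_DIR` environment variable read by `EnvConfig`. The directory path shown here is only an example; `LoadConfig` expands environment variables in the value.

```hcl
# CLI configuration file (not a Terraform configuration file)
plugin_cache_dir = "$HOME/.terraform.d/plugin-cache"
```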
|
@ -58,6 +58,7 @@ func listVariableValueToStringSlice(values []ast.Variable) ([]string, error) {
|
|||
// Funcs is the mapping of built-in functions for configuration.
|
||||
func Funcs() map[string]ast.Function {
|
||||
return map[string]ast.Function{
|
||||
"abs": interpolationFuncAbs(),
|
||||
"basename": interpolationFuncBasename(),
|
||||
"base64decode": interpolationFuncBase64Decode(),
|
||||
"base64encode": interpolationFuncBase64Encode(),
|
||||
|
@ -111,6 +112,7 @@ func Funcs() map[string]ast.Function {
|
|||
"substr": interpolationFuncSubstr(),
|
||||
"timestamp": interpolationFuncTimestamp(),
|
||||
"title": interpolationFuncTitle(),
|
||||
"transpose": interpolationFuncTranspose(),
|
||||
"trimspace": interpolationFuncTrimSpace(),
|
||||
"upper": interpolationFuncUpper(),
|
||||
"urlencode": interpolationFuncURLEncode(),
|
||||
|
@ -1546,3 +1548,61 @@ func interpolationFuncURLEncode() ast.Function {
|
|||
},
|
||||
}
|
||||
}
|
||||
|
||||
// interpolationFuncTranspose implements the "transpose" function
|
||||
// that converts a map (string,list) to a map (string,list) where
|
||||
// the unique values of the original lists become the keys of the
|
||||
// new map and the keys of the original map become values for the
|
||||
// corresponding new keys.
|
||||
func interpolationFuncTranspose() ast.Function {
|
||||
return ast.Function{
|
||||
ArgTypes: []ast.Type{ast.TypeMap},
|
||||
ReturnType: ast.TypeMap,
|
||||
Callback: func(args []interface{}) (interface{}, error) {
|
||||
|
||||
inputMap := args[0].(map[string]ast.Variable)
|
||||
outputMap := make(map[string]ast.Variable)
|
||||
tmpMap := make(map[string][]string)
|
||||
|
||||
for inKey, inVal := range inputMap {
|
||||
if inVal.Type != ast.TypeList {
|
||||
return nil, fmt.Errorf("transpose requires a map of lists of strings")
|
||||
}
|
||||
values := inVal.Value.([]ast.Variable)
|
||||
for _, listVal := range values {
|
||||
if listVal.Type != ast.TypeString {
|
||||
return nil, fmt.Errorf("transpose requires the given map values to be lists of strings")
|
||||
}
|
||||
outKey := listVal.Value.(string)
|
||||
if _, ok := tmpMap[outKey]; !ok {
|
||||
tmpMap[outKey] = make([]string, 0)
|
||||
}
|
||||
outVal := tmpMap[outKey]
|
||||
outVal = append(outVal, inKey)
|
||||
sort.Strings(outVal)
|
||||
tmpMap[outKey] = outVal
|
||||
}
|
||||
}
|
||||
|
||||
for outKey, outVal := range tmpMap {
|
||||
values := make([]ast.Variable, 0)
|
||||
for _, v := range outVal {
|
||||
values = append(values, ast.Variable{Type: ast.TypeString, Value: v})
|
||||
}
|
||||
outputMap[outKey] = ast.Variable{Type: ast.TypeList, Value: values}
|
||||
}
|
||||
return outputMap, nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// interpolationFuncAbs returns the absolute value of a given float.
|
||||
func interpolationFuncAbs() ast.Function {
|
||||
return ast.Function{
|
||||
ArgTypes: []ast.Type{ast.TypeFloat},
|
||||
ReturnType: ast.TypeFloat,
|
||||
Callback: func(args []interface{}) (interface{}, error) {
|
||||
return math.Abs(args[0].(float64)), nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2400,21 +2400,23 @@ type testFunctionCase struct {
|
|||
}
|
||||
|
||||
func testFunction(t *testing.T, config testFunctionConfig) {
|
||||
for i, tc := range config.Cases {
|
||||
ast, err := hil.Parse(tc.Input)
|
||||
if err != nil {
|
||||
t.Fatalf("Case #%d: input: %#v\nerr: %v", i, tc.Input, err)
|
||||
}
|
||||
t.Helper()
|
||||
for _, tc := range config.Cases {
|
||||
t.Run(tc.Input, func(t *testing.T) {
|
||||
ast, err := hil.Parse(tc.Input)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected parse error: %s", err)
|
||||
}
|
||||
|
||||
result, err := hil.Eval(ast, langEvalConfig(config.Vars))
|
||||
if err != nil != tc.Error {
|
||||
t.Fatalf("Case #%d:\ninput: %#v\nerr: %v", i, tc.Input, err)
|
||||
}
|
||||
result, err := hil.Eval(ast, langEvalConfig(config.Vars))
|
||||
if err != nil != tc.Error {
|
||||
t.Fatalf("unexpected eval error: %s", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(result.Value, tc.Result) {
|
||||
t.Fatalf("%d: bad output for input: %s\n\nOutput: %#v\nExpected: %#v",
|
||||
i, tc.Input, result.Value, tc.Result)
|
||||
}
|
||||
if !reflect.DeepEqual(result.Value, tc.Result) {
|
||||
t.Errorf("wrong result\ngiven: %s\ngot: %#v\nwant: %#v", tc.Input, result.Value, tc.Result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2612,3 +2614,131 @@ func TestInterpolateFuncURLEncode(t *testing.T) {
|
|||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestInterpolateFuncTranspose(t *testing.T) {
|
||||
testFunction(t, testFunctionConfig{
|
||||
Vars: map[string]ast.Variable{
|
||||
"var.map": ast.Variable{
|
||||
Type: ast.TypeMap,
|
||||
Value: map[string]ast.Variable{
|
||||
"key1": ast.Variable{
|
||||
Type: ast.TypeList,
|
||||
Value: []ast.Variable{
|
||||
{Type: ast.TypeString, Value: "a"},
|
||||
{Type: ast.TypeString, Value: "b"},
|
||||
},
|
||||
},
|
||||
"key2": ast.Variable{
|
||||
Type: ast.TypeList,
|
||||
Value: []ast.Variable{
|
||||
{Type: ast.TypeString, Value: "a"},
|
||||
{Type: ast.TypeString, Value: "b"},
|
||||
{Type: ast.TypeString, Value: "c"},
|
||||
},
|
||||
},
|
||||
"key3": ast.Variable{
|
||||
Type: ast.TypeList,
|
||||
Value: []ast.Variable{
|
||||
{Type: ast.TypeString, Value: "c"},
|
||||
},
|
||||
},
|
||||
"key4": ast.Variable{
|
||||
Type: ast.TypeList,
|
||||
Value: []ast.Variable{},
|
||||
},
|
||||
}},
|
||||
"var.badmap": ast.Variable{
|
||||
Type: ast.TypeMap,
|
||||
Value: map[string]ast.Variable{
|
||||
"key1": ast.Variable{
|
||||
Type: ast.TypeList,
|
||||
Value: []ast.Variable{
|
||||
{Type: ast.TypeList, Value: []ast.Variable{}},
|
||||
{Type: ast.TypeList, Value: []ast.Variable{}},
|
||||
},
|
||||
},
|
||||
}},
|
||||
"var.worsemap": ast.Variable{
|
||||
Type: ast.TypeMap,
|
||||
Value: map[string]ast.Variable{
|
||||
"key1": ast.Variable{
|
||||
Type: ast.TypeString,
|
||||
Value: "not-a-list",
|
||||
},
|
||||
}},
|
||||
},
|
||||
Cases: []testFunctionCase{
|
||||
{
|
||||
`${transpose(var.map)}`,
|
||||
map[string]interface{}{
|
||||
"a": []interface{}{"key1", "key2"},
|
||||
"b": []interface{}{"key1", "key2"},
|
||||
"c": []interface{}{"key2", "key3"},
|
||||
},
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${transpose(var.badmap)}`,
|
||||
nil,
|
||||
true,
|
||||
},
|
||||
{
|
||||
`${transpose(var.worsemap)}`,
|
||||
nil,
|
||||
true,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestInterpolateFuncAbs(t *testing.T) {
|
||||
testFunction(t, testFunctionConfig{
|
||||
Cases: []testFunctionCase{
|
||||
{
|
||||
`${abs()}`,
|
||||
nil,
|
||||
true,
|
||||
},
|
||||
{
|
||||
`${abs("")}`,
|
||||
nil,
|
||||
true,
|
||||
},
|
||||
{
|
||||
`${abs(0)}`,
|
||||
"0",
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${abs(1)}`,
|
||||
"1",
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${abs(-1)}`,
|
||||
"1",
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${abs(1.0)}`,
|
||||
"1",
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${abs(-1.0)}`,
|
||||
"1",
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${abs(-3.14)}`,
|
||||
"3.14",
|
||||
false,
|
||||
},
|
||||
{
|
||||
`${abs(-42.001)}`,
|
||||
"42.001",
|
||||
false,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
|
|
@ -79,7 +79,7 @@ func getStorage(s getter.Storage, key string, src string, mode GetMode) (string,
|
|||
}
|
||||
|
||||
const (
|
||||
registryAPI = "https://registry.terraform.io/v1/modules/"
|
||||
registryAPI = "https://registry.terraform.io/v1/modules"
|
||||
xTerraformGet = "X-Terraform-Get"
|
||||
)
|
||||
|
||||
|
|
|
@ -16,11 +16,29 @@ import (
|
|||
version "github.com/hashicorp/go-version"
|
||||
)
|
||||
|
||||
// map of module names and version for test module.
|
||||
// only one version for now, as we only lookup latest from the registry
|
||||
var testMods = map[string]string{
|
||||
"registry/foo/bar": "0.2.3",
|
||||
"registry/foo/baz": "1.10.0",
|
||||
// Map of module names and location of test modules.
|
||||
// Only one version for now, as we only lookup latest from the registry.
|
||||
type testMod struct {
|
||||
location string
|
||||
version string
|
||||
}
|
||||
|
||||
// All the locations from the mockRegistry start with a file:// scheme. If
|
||||
// the location string here doesn't have a scheme, the mockRegistry will
|
||||
// find the absolute path and return a complete URL.
|
||||
var testMods = map[string]testMod{
|
||||
"registry/foo/bar": {
|
||||
location: "file:///download/registry/foo/bar/0.2.3//*?archive=tar.gz",
|
||||
version: "0.2.3",
|
||||
},
|
||||
"registry/foo/baz": {
|
||||
location: "file:///download/registry/foo/baz/1.10.0//*?archive=tar.gz",
|
||||
version: "1.10.0",
|
||||
},
|
||||
"registry/local/sub": {
|
||||
location: "test-fixtures/registry-tar-subdir/foo.tgz//*?archive=tar.gz",
|
||||
version: "0.1.2",
|
||||
},
|
||||
}
|
||||
|
||||
func latestVersion(versions []string) string {
|
||||
|
@ -56,13 +74,19 @@ func mockRegistry() *httptest.Server {
|
|||
return
|
||||
}
|
||||
|
||||
version, ok := testMods[matches[1]]
|
||||
mod, ok := testMods[matches[1]]
|
||||
if !ok {
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
location := fmt.Sprintf("%s/download/%s/%s", server.URL, matches[1], version)
|
||||
location := mod.location
|
||||
if !strings.HasPrefix(location, "file:///") {
|
||||
// we can't use filepath.Abs because it will clean `//`
|
||||
wd, _ := os.Getwd()
|
||||
location = fmt.Sprintf("file://%s/%s", wd, location)
|
||||
}
|
||||
|
||||
w.Header().Set(xTerraformGet, location)
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
// no body
|
||||
|
@ -78,7 +102,7 @@ func TestDetectRegistry(t *testing.T) {
|
|||
defer server.Close()
|
||||
|
||||
detector := registryDetector{
|
||||
api: server.URL + "/v1/modules/",
|
||||
api: server.URL + "/v1/modules",
|
||||
client: server.Client(),
|
||||
}
|
||||
|
||||
|
@ -90,12 +114,12 @@ func TestDetectRegistry(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
source: "registry/foo/bar",
|
||||
location: "download/registry/foo/bar/0.2.3",
|
||||
location: testMods["registry/foo/bar"].location,
|
||||
found: true,
|
||||
},
|
||||
{
|
||||
source: "registry/foo/baz",
|
||||
location: "download/registry/foo/baz/1.10.0",
|
||||
location: testMods["registry/foo/baz"].location,
|
||||
found: true,
|
||||
},
|
||||
// this should not be found, but not stop detection
|
||||
|
@ -157,7 +181,7 @@ func TestDetectors(t *testing.T) {
|
|||
}
|
||||
|
||||
regDetector := ®istryDetector{
|
||||
api: server.URL + "/v1/modules/",
|
||||
api: server.URL + "/v1/modules",
|
||||
client: server.Client(),
|
||||
}
|
||||
|
||||
|
@ -177,7 +201,7 @@ func TestDetectors(t *testing.T) {
|
|||
}{
|
||||
{
|
||||
source: "registry/foo/bar",
|
||||
location: "download/registry/foo/bar/0.2.3",
|
||||
location: "file:///download/registry/foo/bar/0.2.3//*?archive=tar.gz",
|
||||
},
|
||||
// this should not be found, but not stop detection
|
||||
{
|
||||
|
@ -248,6 +272,65 @@ func TestDetectors(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
// GitHub archives always contain the module source in a single subdirectory,
|
||||
// so the registry will return a path with a `//*` suffix. We need to make
|
||||
// sure this doesn't interfere with our internal handling of `//` subdir.
|
||||
func TestRegistryGitHubArchive(t *testing.T) {
|
||||
server := mockRegistry()
|
||||
defer server.Close()
|
||||
|
||||
regDetector := ®istryDetector{
|
||||
api: server.URL + "/v1/modules",
|
||||
client: server.Client(),
|
||||
}
|
||||
|
||||
origDetectors := detectors
|
||||
defer func() {
|
||||
detectors = origDetectors
|
||||
}()
|
||||
|
||||
detectors = []getter.Detector{
|
||||
new(getter.GitHubDetector),
|
||||
new(getter.BitBucketDetector),
|
||||
new(getter.S3Detector),
|
||||
new(localDetector),
|
||||
regDetector,
|
||||
}
|
||||
|
||||
storage := testStorage(t)
|
||||
tree := NewTree("", testConfig(t, "registry-tar-subdir"))
|
||||
|
||||
if err := tree.Load(storage, GetModeGet); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
if !tree.Loaded() {
|
||||
t.Fatal("should be loaded")
|
||||
}
|
||||
|
||||
if err := tree.Load(storage, GetModeNone); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
// stop the registry server, and make sure that we don't need to call out again
|
||||
server.Close()
|
||||
tree = NewTree("", testConfig(t, "registry-tar-subdir"))
|
||||
|
||||
if err := tree.Load(storage, GetModeGet); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
if !tree.Loaded() {
|
||||
t.Fatal("should be loaded")
|
||||
}
|
||||
|
||||
actual := strings.TrimSpace(tree.String())
|
||||
expected := strings.TrimSpace(treeLoadSubdirStr)
|
||||
if actual != expected {
|
||||
t.Fatalf("got: \n\n%s\nexpected: \n\n%s", actual, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAccRegistryDiscover(t *testing.T) {
|
||||
if os.Getenv("TF_ACC") == "" {
|
||||
t.Skip("skipping ACC test")
|
||||
|
@ -272,3 +355,30 @@ func TestAccRegistryDiscover(t *testing.T) {
|
|||
t.Fatalf("url doesn't contain 'consul': %s", u.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestAccRegistryLoad(t *testing.T) {
|
||||
if os.Getenv("TF_ACC") == "" {
|
||||
t.Skip("skipping ACC test")
|
||||
}
|
||||
|
||||
storage := testStorage(t)
|
||||
tree := NewTree("", testConfig(t, "registry-load"))
|
||||
|
||||
if err := tree.Load(storage, GetModeGet); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
if !tree.Loaded() {
|
||||
t.Fatal("should be loaded")
|
||||
}
|
||||
|
||||
if err := tree.Load(storage, GetModeNone); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
// TODO expand this further by fetching some metadata from the registry
|
||||
actual := strings.TrimSpace(tree.String())
|
||||
if !strings.Contains(actual, "(path: vault)") {
|
||||
t.Fatal("missing vault module, got:\n", actual)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@ package module
|
|||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
@ -10,6 +11,12 @@ import (
|
|||
"github.com/hashicorp/terraform/config"
|
||||
)
|
||||
|
||||
func init() {
|
||||
if os.Getenv("TF_LOG") == "" {
|
||||
log.SetOutput(ioutil.Discard)
|
||||
}
|
||||
}
|
||||
|
||||
const fixtureDir = "./test-fixtures"
|
||||
|
||||
func tempDir(t *testing.T) string {
|
||||
|
|
Binary file not shown.
|
@ -0,0 +1,3 @@
|
|||
module "foo" {
|
||||
source = "./foo.tgz//sub"
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
module "vault" {
|
||||
source = "hashicorp/vault/aws"
|
||||
}
|
Binary file not shown.
|
@ -0,0 +1,4 @@
|
|||
module "foo" {
|
||||
// the mock test registry will redirect this to the local tar file
|
||||
source = "registry/local/sub"
|
||||
}
|
Binary file not shown.
|
@ -0,0 +1,4 @@
|
|||
module "foo" {
|
||||
// the module in sub references sibling module baz via "../baz"
|
||||
source = "./foo.tgz//sub"
|
||||
}
|
|
@ -3,7 +3,12 @@ package module
|
|||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
|
@ -176,13 +181,73 @@ func (t *Tree) Load(s getter.Storage, mode GetMode) error {
|
|||
copy(path, t.path)
|
||||
path = append(path, m.Name)
|
||||
|
||||
source, err := getter.Detect(m.Source, t.config.Dir, detectors)
|
||||
// The key is the string that will be hashed to uniquely id the Source.
|
||||
// The leading digit can be incremented to force re-fetch all existing
|
||||
// modules.
|
||||
key := fmt.Sprintf("0.root.%s-%s", strings.Join(path, "."), m.Source)
|
||||
|
||||
log.Printf("[TRACE] module source %q", m.Source)
|
||||
// Split out the subdir if we have one.
|
||||
// Terraform keeps the entire requested tree for now, so that modules can
|
||||
// reference sibling modules from the same archive or repo.
|
||||
source, subDir := getter.SourceDirSubdir(m.Source)
|
||||
|
||||
// First check if we need to download anything.
|
||||
// This is also checked by the getter.Storage implementation, but we
|
||||
// want to be able to short-circuit the detection as well, since some
|
||||
// detectors may need to make external calls.
|
||||
dir, found, err := s.Dir(key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// looks like we already have it
|
||||
// In order to load the Tree we need to find out if there was another
|
||||
// subDir stored from discovery.
|
||||
if found && mode != GetModeUpdate {
|
||||
subDir, err := t.getSubdir(dir)
|
||||
if err != nil {
|
||||
// If there's a problem with the subdir record, we'll let the
|
||||
// recordSubdir method fix it up. Any other filesystem errors
|
||||
// will turn up again below.
|
||||
log.Println("[WARN] error reading subdir record:", err)
|
||||
} else {
|
||||
dir := filepath.Join(dir, subDir)
|
||||
// Load the configurations.Dir(source)
|
||||
children[m.Name], err = NewTreeModule(m.Name, dir)
|
||||
if err != nil {
|
||||
return fmt.Errorf("module %s: %s", m.Name, err)
|
||||
}
|
||||
// Set the path of this child
|
||||
children[m.Name].path = path
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
log.Printf("[TRACE] module source: %q", source)
|
||||
|
||||
source, err = getter.Detect(source, t.config.Dir, detectors)
|
||||
if err != nil {
|
||||
return fmt.Errorf("module %s: %s", m.Name, err)
|
||||
}
|
||||
// Get the directory where this module is so we can load it
|
||||
key := strings.Join(path, ".")
|
||||
key = fmt.Sprintf("module.%s-%s", key, m.Source)
|
||||
|
||||
log.Printf("[TRACE] detected module source %q", source)
|
||||
|
||||
// Check if the detector introduced something new.
|
||||
// For example, the registry always adds a subdir of `//*`,
|
||||
// indicating that we need to strip off the first component from the
|
||||
// tar archive, though we may not yet know what it is called.
|
||||
//
|
||||
// TODO: This can cause us to lose the previously detected subdir. It
|
||||
// was never an issue before, since none of the supported detectors
|
||||
// previously had this behavior, but we may want to add this ability to
|
||||
// registry modules.
|
||||
source, subDir2 := getter.SourceDirSubdir(source)
|
||||
if subDir2 != "" {
|
||||
subDir = subDir2
|
||||
}
|
||||
|
||||
log.Printf("[TRACE] getting module source %q", source)
|
||||
|
||||
dir, ok, err := getStorage(s, key, source, mode)
|
||||
if err != nil {
|
||||
|
@ -193,12 +258,31 @@ func (t *Tree) Load(s getter.Storage, mode GetMode) error {
|
|||
"module %s: not found, may need to be downloaded using 'terraform get'", m.Name)
|
||||
}
|
||||
|
||||
children[m.Name], err = NewTreeModule(m.Name, dir)
|
||||
if err != nil {
|
||||
return fmt.Errorf(
|
||||
"module %s: %s", m.Name, err)
|
||||
// expand and record the subDir for later
|
||||
if subDir != "" {
|
||||
fullDir, err := getter.SubdirGlob(dir, subDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// +1 to account for the pathsep
|
||||
if len(dir)+1 > len(fullDir) {
|
||||
return fmt.Errorf("invalid module storage path %q", fullDir)
|
||||
}
|
||||
|
||||
subDir = fullDir[len(dir)+1:]
|
||||
|
||||
if err := t.recordSubdir(dir, subDir); err != nil {
|
||||
return err
|
||||
}
|
||||
dir = fullDir
|
||||
}
|
||||
|
||||
// Load the configurations.Dir(source)
|
||||
children[m.Name], err = NewTreeModule(m.Name, dir)
|
||||
if err != nil {
|
||||
return fmt.Errorf("module %s: %s", m.Name, err)
|
||||
}
|
||||
// Set the path of this child
|
||||
children[m.Name].path = path
|
||||
}
|
||||
|
@ -216,6 +300,65 @@ func (t *Tree) Load(s getter.Storage, mode GetMode) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func subdirRecordsPath(dir string) string {
|
||||
const filename = "module-subdir.json"
|
||||
// Get the parent directory.
|
||||
// The current FolderStorage implementation needed to be able to create
|
||||
// this directory, so we can be reasonably certain we can use it.
|
||||
parent := filepath.Dir(filepath.Clean(dir))
|
||||
return filepath.Join(parent, filename)
|
||||
}
|
||||
|
||||
// unmarshal the records file in the parent directory. Always returns a valid map.
|
||||
func loadSubdirRecords(dir string) (map[string]string, error) {
|
||||
records := map[string]string{}
|
||||
|
||||
recordsPath := subdirRecordsPath(dir)
|
||||
data, err := ioutil.ReadFile(recordsPath)
|
||||
if err != nil && !os.IsNotExist(err) {
|
||||
return records, err
|
||||
}
|
||||
|
||||
if len(data) == 0 {
|
||||
return records, nil
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(data, &records); err != nil {
|
||||
return records, err
|
||||
}
|
||||
return records, nil
|
||||
}
|
||||
|
||||
func (t *Tree) getSubdir(dir string) (string, error) {
|
||||
records, err := loadSubdirRecords(dir)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return records[dir], nil
|
||||
}
|
||||
|
||||
// Mark the location of a detected subdir in a top-level file so we
|
||||
// can skip detection when not updating the module.
|
||||
func (t *Tree) recordSubdir(dir, subdir string) error {
|
||||
records, err := loadSubdirRecords(dir)
|
||||
if err != nil {
|
||||
// if there was a problem with the file, we will attempt to write a new
|
||||
// one. Any non-data related error should surface there.
|
||||
log.Printf("[WARN] error reading subdir records: %s", err)
|
||||
}
|
||||
|
||||
records[dir] = subdir
|
||||
|
||||
js, err := json.Marshal(records)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
recordsPath := subdirRecordsPath(dir)
|
||||
return ioutil.WriteFile(recordsPath, js, 0644)
|
||||
}
|
||||
|
||||
// Path is the full path to this tree.
|
||||
func (t *Tree) Path() []string {
|
||||
return t.path
|
||||
|
|
|
@ -2,7 +2,9 @@ package module
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
@ -209,40 +211,120 @@ func TestTreeLoad_parentRef(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestTreeLoad_subdir(t *testing.T) {
|
||||
storage := testStorage(t)
|
||||
tree := NewTree("", testConfig(t, "basic-subdir"))
|
||||
fixtures := []string{
|
||||
"basic-subdir",
|
||||
"basic-tar-subdir",
|
||||
|
||||
if tree.Loaded() {
|
||||
t.Fatal("should not be loaded")
|
||||
// Passing a subpath to go getter extracts only this subpath. The old
|
||||
// internal code would keep the entire directory structure, allowing a
|
||||
// top-level module to reference others through its parent directory.
|
||||
// TODO: this can be removed as a breaking change in a major release.
|
||||
"tar-subdir-to-parent",
|
||||
}
|
||||
|
||||
// This should error because we haven't gotten things yet
|
||||
if err := tree.Load(storage, GetModeNone); err == nil {
|
||||
t.Fatal("should error")
|
||||
for _, tc := range fixtures {
|
||||
t.Run(tc, func(t *testing.T) {
|
||||
storage := testStorage(t)
|
||||
tree := NewTree("", testConfig(t, tc))
|
||||
|
||||
if tree.Loaded() {
|
||||
t.Fatal("should not be loaded")
|
||||
}
|
||||
|
||||
// This should error because we haven't gotten things yet
|
||||
if err := tree.Load(storage, GetModeNone); err == nil {
|
||||
t.Fatal("should error")
|
||||
}
|
||||
|
||||
if tree.Loaded() {
|
||||
t.Fatal("should not be loaded")
|
||||
}
|
||||
|
||||
// This should get things
|
||||
if err := tree.Load(storage, GetModeGet); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
if !tree.Loaded() {
|
||||
t.Fatal("should be loaded")
|
||||
}
|
||||
|
||||
// This should no longer error
|
||||
if err := tree.Load(storage, GetModeNone); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
actual := strings.TrimSpace(tree.String())
|
||||
expected := strings.TrimSpace(treeLoadSubdirStr)
|
||||
if actual != expected {
|
||||
t.Fatalf("bad: \n\n%s", actual)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTree_recordSubDir(t *testing.T) {
|
||||
td, err := ioutil.TempDir("", "tf-module")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer os.RemoveAll(td)
|
||||
|
||||
dir := filepath.Join(td, "0131bf0fef686e090b16bdbab4910ddf")
|
||||
|
||||
subDir := "subDirName"
|
||||
|
||||
tree := Tree{}
|
||||
|
||||
// record and read the subdir path
|
||||
if err := tree.recordSubdir(dir, subDir); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
actual, err := tree.getSubdir(dir)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if tree.Loaded() {
|
||||
t.Fatal("should not be loaded")
|
||||
if actual != subDir {
|
||||
t.Fatalf("expected subDir %q, got %q", subDir, actual)
|
||||
}
|
||||
|
||||
// This should get things
|
||||
if err := tree.Load(storage, GetModeGet); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
// overwrite the path, and make sure we get the new one
|
||||
subDir = "newSubDir"
|
||||
if err := tree.recordSubdir(dir, subDir); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
actual, err = tree.getSubdir(dir)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !tree.Loaded() {
|
||||
t.Fatal("should be loaded")
|
||||
if actual != subDir {
|
||||
t.Fatalf("expected subDir %q, got %q", subDir, actual)
|
||||
}
|
||||
|
||||
// This should no longer error
|
||||
if err := tree.Load(storage, GetModeNone); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
// create a fake entry
|
||||
if err := ioutil.WriteFile(subdirRecordsPath(dir), []byte("BAD DATA"), 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
actual := strings.TrimSpace(tree.String())
|
||||
expected := strings.TrimSpace(treeLoadSubdirStr)
|
||||
if actual != expected {
|
||||
t.Fatalf("bad: \n\n%s", actual)
|
||||
// this should fail because there are now 2 entries
|
||||
actual, err = tree.getSubdir(dir)
|
||||
if err == nil {
|
||||
t.Fatal("expected multiple subdir entries")
|
||||
}
|
||||
|
||||
// writing the subdir entry should remove the incorrect value
|
||||
if err := tree.recordSubdir(dir, subDir); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
actual, err = tree.getSubdir(dir)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if actual != subDir {
|
||||
t.Fatalf("expected subDir %q, got %q", subDir, actual)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@ These apply to all contributors, but maintainers should lead by examples! :wink:
|
|||
- if the PR submitter has push privileges (recognizable via `Collaborator`, `Member` or `Owner` badge) - we expect **the submitter** to merge their own PR after receiving a positive review from either HC employee or another maintainer. _Exceptions apply - see below._
|
||||
- we prefer to use GitHub's interface or API to do this; just click the green button
|
||||
- squash?
|
||||
- squash when you think the commit history is irrelevant (will not be helpful for any readers in T+6mons)
|
||||
- squash when you think the commit history is irrelevant (will not be helpful for any readers in T+6months)
|
||||
- Add the new PR to the **Changelog** if it may affect the user (almost any PR except test changes and docs updates)
|
||||
- we prefer to use GitHub's web interface to modify the Changelog and use `[GH-12345]` to format the PR number. These will be turned into links as part of the release process. Breaking changes should always be documented separately.
|
||||
|
||||
|
|
|
@ -17,6 +17,7 @@ import (
|
|||
type binary struct {
|
||||
binPath string
|
||||
workDir string
|
||||
env []string
|
||||
}
|
||||
|
||||
// NewBinary prepares a temporary directory containing the files from the
|
||||
|
@ -93,6 +94,12 @@ func NewBinary(binaryPath, workingDir string) *binary {
|
|||
}
|
||||
}
|
||||
|
||||
// AddEnv appends an entry to the environment variable table passed to any
|
||||
// commands subsequently run.
|
||||
func (b *binary) AddEnv(entry string) {
|
||||
b.env = append(b.env, entry)
|
||||
}
|
||||
|
||||
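As a usage sketch (not part of the real suite), a test could inject extra environment variables before invoking the generated binary; the binary path, fixture directory, and `TF_LOG` value below are illustrative:

```go
package e2etest

import "testing"

// TestBinaryAddEnvSketch illustrates AddEnv; the paths and names are placeholders.
func TestBinaryAddEnvSketch(t *testing.T) {
	tf := NewBinary("../../terraform", "test-fixtures/empty")

	// AddEnv entries are appended to the child process environment by Cmd,
	// after defaults such as CHECKPOINT_DISABLE=1.
	tf.AddEnv("TF_LOG=trace")

	cmd := tf.Cmd("version")
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to run terraform version: %s", err)
	}
}
```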
// Cmd returns an exec.Cmd pre-configured to run the generated Terraform
|
||||
// binary with the given arguments in the temporary working directory.
|
||||
//
|
||||
|
@ -108,6 +115,8 @@ func (b *binary) Cmd(args ...string) *exec.Cmd {
|
|||
// end-to-end testing of our Checkpoint interactions.)
|
||||
cmd.Env = append(cmd.Env, "CHECKPOINT_DISABLE=1")
|
||||
|
||||
cmd.Env = append(cmd.Env, b.env...)
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@ package validation
|
|||
import (
|
||||
"fmt"
|
||||
"net"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
|
@ -105,6 +106,24 @@ func StringLenBetween(min, max int) schema.SchemaValidateFunc {
|
|||
}
|
||||
}
|
||||
|
||||
// NoZeroValues is a SchemaValidateFunc which tests if the provided value is
|
||||
// not a zero value. It's useful in situations where you want to catch
|
||||
// explicit zero values on things like required fields during validation.
|
||||
func NoZeroValues(i interface{}, k string) (s []string, es []error) {
|
||||
if reflect.ValueOf(i).Interface() == reflect.Zero(reflect.TypeOf(i)).Interface() {
|
||||
switch reflect.TypeOf(i).Kind() {
|
||||
case reflect.String:
|
||||
es = append(es, fmt.Errorf("%s must not be empty", k))
|
||||
case reflect.Int, reflect.Float64:
|
||||
es = append(es, fmt.Errorf("%s must not be zero", k))
|
||||
default:
|
||||
// this validator should only ever be applied to TypeString, TypeInt and TypeFloat
|
||||
panic(fmt.Errorf("can't use NoZeroValues with %T attribute %s", i, k))
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
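A hedged example of attaching `NoZeroValues` to a required attribute in a provider schema; the field name here is made up for illustration:

```go
package example

import (
	"github.com/hashicorp/terraform/helper/schema"
	"github.com/hashicorp/terraform/helper/validation"
)

// exampleSchema shows NoZeroValues rejecting explicit zero values ("" here)
// on a required string field during validation.
func exampleSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"name": {
			Type:         schema.TypeString,
			Required:     true,
			ValidateFunc: validation.NoZeroValues,
		},
	}
}
```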
// CIDRNetwork returns a SchemaValidateFunc which tests if the provided value
|
||||
// is of type string, is in valid CIDR network notation, and has significant bits between min and max (inclusive)
|
||||
func CIDRNetwork(min, max int) schema.SchemaValidateFunc {
|
||||
|
|
|
@ -199,6 +199,38 @@ func TestValidateListUniqueStrings(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestValidationNoZeroValues(t *testing.T) {
|
||||
runTestCases(t, []testCase{
|
||||
{
|
||||
val: "foo",
|
||||
f: NoZeroValues,
|
||||
},
|
||||
{
|
||||
val: 1,
|
||||
f: NoZeroValues,
|
||||
},
|
||||
{
|
||||
val: float64(1),
|
||||
f: NoZeroValues,
|
||||
},
|
||||
{
|
||||
val: "",
|
||||
f: NoZeroValues,
|
||||
expectedErr: regexp.MustCompile("must not be empty"),
|
||||
},
|
||||
{
|
||||
val: 0,
|
||||
f: NoZeroValues,
|
||||
expectedErr: regexp.MustCompile("must not be zero"),
|
||||
},
|
||||
{
|
||||
val: float64(0),
|
||||
f: NoZeroValues,
|
||||
expectedErr: regexp.MustCompile("must not be zero"),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func runTestCases(t *testing.T, cases []testCase) {
|
||||
matchErr := func(errs []error, r *regexp.Regexp) bool {
|
||||
// err must match one provided
|
||||
|
|
29
main.go
29
main.go
|
@ -6,6 +6,7 @@ import (
|
|||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
|
@ -95,6 +96,16 @@ func realMain() int {
|
|||
return wrappedMain()
|
||||
}
|
||||
|
||||
func init() {
|
||||
Ui = &cli.PrefixedUi{
|
||||
AskPrefix: OutputPrefix,
|
||||
OutputPrefix: OutputPrefix,
|
||||
InfoPrefix: OutputPrefix,
|
||||
ErrorPrefix: ErrorPrefix,
|
||||
Ui: &cli.BasicUi{Writer: os.Stdout},
|
||||
}
|
||||
}
|
||||
|
||||
func wrappedMain() int {
|
||||
// We always need to close the DebugInfo before we exit.
|
||||
defer terraform.CloseDebugInfo()
|
||||
|
@ -127,6 +138,18 @@ func wrappedMain() int {
|
|||
config = *config.Merge(usrcfg)
|
||||
}
|
||||
|
||||
if envConfig := EnvConfig(); envConfig != nil {
|
||||
// envConfig takes precedence
|
||||
config = *envConfig.Merge(&config)
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] CLI Config is %#v", config)
|
||||
|
||||
// In tests, Commands may already be set to provide mock commands
|
||||
if Commands == nil {
|
||||
initCommands(&config)
|
||||
}
|
||||
|
||||
// Run checkpoint
|
||||
go runCheckpoint(&config)
|
||||
|
||||
|
@ -134,6 +157,7 @@ func wrappedMain() int {
|
|||
defer plugin.CleanupClients()
|
||||
|
||||
// Get the command line args.
|
||||
binName := filepath.Base(os.Args[0])
|
||||
args := os.Args[1:]
|
||||
|
||||
// Build the CLI so far, we do this so we can query the subcommand.
|
||||
|
@ -175,10 +199,15 @@ func wrappedMain() int {
|
|||
// Rebuild the CLI with any modified args.
|
||||
log.Printf("[INFO] CLI command args: %#v", args)
|
||||
cliRunner = &cli.CLI{
|
||||
Name: binName,
|
||||
Args: args,
|
||||
Commands: Commands,
|
||||
HelpFunc: helpFunc,
|
||||
HelpWriter: os.Stdout,
|
||||
|
||||
Autocomplete: true,
|
||||
AutocompleteInstall: "install-autocomplete",
|
||||
AutocompleteUninstall: "uninstall-autocomplete",
|
||||
}
|
||||
|
||||
// Pass in the overriding plugin paths from config
|
||||
|
|
13
main_test.go
13
main_test.go
|
@ -18,9 +18,12 @@ func TestMain_cliArgsFromEnv(t *testing.T) {
|
|||
defer func() { os.Args = oldArgs }()
|
||||
|
||||
// Setup test command and restore that
|
||||
Commands = make(map[string]cli.CommandFactory)
|
||||
defer func() {
|
||||
Commands = nil
|
||||
}()
|
||||
testCommandName := "unit-test-cli-args"
|
||||
testCommand := &testCommandCLI{}
|
||||
defer func() { delete(Commands, testCommandName) }()
|
||||
Commands[testCommandName] = func() (cli.Command, error) {
|
||||
return testCommand, nil
|
||||
}
|
||||
|
@ -150,6 +153,12 @@ func TestMain_cliArgsFromEnvAdvanced(t *testing.T) {
|
|||
oldArgs := os.Args
|
||||
defer func() { os.Args = oldArgs }()
|
||||
|
||||
// Setup test command and restore that
|
||||
Commands = make(map[string]cli.CommandFactory)
|
||||
defer func() {
|
||||
Commands = nil
|
||||
}()
|
||||
|
||||
cases := []struct {
|
||||
Name string
|
||||
Command string
|
||||
|
@ -230,7 +239,7 @@ func TestMain_cliArgsFromEnvAdvanced(t *testing.T) {
|
|||
testCommand.Args = nil
|
||||
exit := wrappedMain()
|
||||
if (exit != 0) != tc.Err {
|
||||
t.Fatalf("bad: %d", exit)
|
||||
t.Fatalf("unexpected exit status %d; want 0", exit)
|
||||
}
|
||||
if tc.Err {
|
||||
return
|
||||
|
|
|
@ -3,6 +3,7 @@ package discovery
|
|||
import (
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
@ -70,6 +71,12 @@ func findPluginPaths(kind string, dirs []string) []string {
|
|||
continue
|
||||
}
|
||||
|
||||
// Check that the file we found is usable
|
||||
if !pathIsFile(absPath) {
|
||||
log.Printf("[ERROR] ignoring non-file %s", absPath)
|
||||
continue
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] found %s %q", kind, fullName)
|
||||
ret = append(ret, filepath.Clean(absPath))
|
||||
continue
|
||||
|
@ -82,6 +89,12 @@ func findPluginPaths(kind string, dirs []string) []string {
|
|||
continue
|
||||
}
|
||||
|
||||
// Check that the file we found is usable
|
||||
if !pathIsFile(absPath) {
|
||||
log.Printf("[ERROR] ignoring non-file %s", absPath)
|
||||
continue
|
||||
}
|
||||
|
||||
log.Printf("[WARNING] found legacy %s %q", kind, fullName)
|
||||
|
||||
ret = append(ret, filepath.Clean(absPath))
|
||||
|
@ -91,6 +104,17 @@ func findPluginPaths(kind string, dirs []string) []string {
|
|||
return ret
|
||||
}
|
||||
|
||||
// Returns true if and only if the given path refers to a file or a symlink
|
||||
// to a file.
|
||||
func pathIsFile(path string) bool {
|
||||
info, err := os.Stat(path)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return !info.IsDir()
|
||||
}
|
||||
|
||||
// ResolvePluginPaths takes a list of paths to plugin executables (as returned
|
||||
// by e.g. FindPluginPaths) and produces a PluginMetaSet describing the
|
||||
// referenced plugins.
|
||||
|
|
|
@ -3,10 +3,12 @@ package discovery
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
@ -48,6 +50,10 @@ type Installer interface {
|
|||
type ProviderInstaller struct {
|
||||
Dir string
|
||||
|
||||
// Cache is used to access and update a local cache of plugins if non-nil.
|
||||
// Can be nil to disable caching.
|
||||
Cache PluginCache
|
||||
|
||||
PluginProtocolVersion uint
|
||||
|
||||
// OS and Arch specify the OS and architecture that should be used when
|
||||
|
@ -101,6 +107,12 @@ func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, e
|
|||
// sort them newest to oldest
|
||||
Versions(versions).Sort()
|
||||
|
||||
// Ensure that our installation directory exists
|
||||
err = os.MkdirAll(i.Dir, os.ModePerm)
|
||||
if err != nil {
|
||||
return PluginMeta{}, fmt.Errorf("failed to create plugin dir %s: %s", i.Dir, err)
|
||||
}
|
||||
|
||||
// take the first matching plugin we find
|
||||
for _, v := range versions {
|
||||
url := i.providerURL(provider, v.String())
|
||||
|
@ -120,8 +132,8 @@ func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, e
|
|||
log.Printf("[DEBUG] fetching provider info for %s version %s", provider, v)
|
||||
if checkPlugin(url, i.PluginProtocolVersion) {
|
||||
i.Ui.Info(fmt.Sprintf("- Downloading plugin for provider %q (%s)...", provider, v.String()))
|
||||
log.Printf("[DEBUG] getting provider %q version %q at %s", provider, v, url)
|
||||
err := getter.Get(i.Dir, url)
|
||||
log.Printf("[DEBUG] getting provider %q version %q", provider, v)
|
||||
err := i.install(provider, v, url)
|
||||
if err != nil {
|
||||
return PluginMeta{}, err
|
||||
}
|
||||
|
@ -168,6 +180,98 @@ func (i *ProviderInstaller) Get(provider string, req Constraints) (PluginMeta, e
|
|||
return PluginMeta{}, ErrorNoVersionCompatible
|
||||
}
|
||||
|
||||
func (i *ProviderInstaller) install(provider string, version Version, url string) error {
|
||||
if i.Cache != nil {
|
||||
log.Printf("[DEBUG] looking for provider %s %s in plugin cache", provider, version)
|
||||
cached := i.Cache.CachedPluginPath("provider", provider, version)
|
||||
if cached == "" {
|
||||
log.Printf("[DEBUG] %s %s not yet in cache, so downloading %s", provider, version, url)
|
||||
err := getter.Get(i.Cache.InstallDir(), url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// should now be in cache
|
||||
cached = i.Cache.CachedPluginPath("provider", provider, version)
|
||||
if cached == "" {
|
||||
// should never happen if the getter is behaving properly
|
||||
// and the plugins are packaged properly.
|
||||
return fmt.Errorf("failed to find downloaded plugin in cache %s", i.Cache.InstallDir())
|
||||
}
|
||||
}
|
||||
|
||||
// Link or copy the cached binary into our install dir so the
|
||||
// normal resolution machinery can find it.
|
||||
filename := filepath.Base(cached)
|
||||
targetPath := filepath.Join(i.Dir, filename)
|
||||
|
||||
log.Printf("[DEBUG] installing %s %s to %s from local cache %s", provider, version, targetPath, cached)
|
||||
|
||||
// Delete if we can. If there's nothing there already then no harm done.
|
||||
// This is important because we can't create a link if there's
|
||||
// already a file of the same name present.
|
||||
// (any other error here we'll catch below when we try to write here)
|
||||
os.Remove(targetPath)
|
||||
|
||||
// We don't attempt linking on Windows because links are not
|
||||
// comprehensively supported by all tools/apps in Windows and
|
||||
// so we choose to be conservative to avoid creating any
|
||||
// weird issues for Windows users.
|
||||
linkErr := errors.New("link not supported for Windows") // placeholder error, never actually returned
|
||||
if runtime.GOOS != "windows" {
|
||||
// Try hard linking first. Hard links are preferable because this
|
||||
// creates a self-contained directory that doesn't depend on the
|
||||
// cache after install.
|
||||
linkErr = os.Link(cached, targetPath)
|
||||
|
||||
// If that failed, try a symlink. This _does_ depend on the cache
|
||||
// after install, so the user must manage the cache more carefully
|
||||
// in this case, but avoids creating redundant copies of the
|
||||
// plugins on disk.
|
||||
if linkErr != nil {
|
||||
linkErr = os.Symlink(cached, targetPath)
|
||||
}
|
||||
}
|
||||
|
||||
// If we still have an error then we'll try a copy as a fallback.
|
||||
// In this case either the OS is Windows or the target filesystem
|
||||
// can't support symlinks.
|
||||
if linkErr != nil {
|
||||
srcFile, err := os.Open(cached)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open cached plugin %s: %s", cached, err)
|
||||
}
|
||||
defer srcFile.Close()
|
||||
|
||||
destFile, err := os.OpenFile(targetPath, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create %s: %s", targetPath, err)
|
||||
}
|
||||
|
||||
_, err = io.Copy(destFile, srcFile)
|
||||
if err != nil {
|
||||
destFile.Close()
|
||||
return fmt.Errorf("failed to copy cached plugin from %s to %s: %s", cached, targetPath, err)
|
||||
}
|
||||
|
||||
err = destFile.Close()
|
||||
if err != nil {
|
||||
return fmt.Errorf("error creating %s: %s", targetPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
// One way or another, by the time we get here we should have either
|
||||
// a link or a copy of the cached plugin within i.Dir, as expected.
|
||||
} else {
|
||||
log.Printf("[DEBUG] plugin cache is disabled, so downloading %s %s from %s", provider, version, url)
|
||||
err := getter.Get(i.Dir, url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *ProviderInstaller) PurgeUnused(used map[string]PluginMeta) (PluginMetaSet, error) {
|
||||
purge := make(PluginMetaSet)
|
||||
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
package discovery
|
||||
|
||||
// PluginCache is an interface implemented by objects that are able to maintain
|
||||
// a cache of plugins.
|
||||
type PluginCache interface {
|
||||
// CachedPluginPath returns a path where the requested plugin is already
|
||||
// cached, or an empty string if the requested plugin is not yet cached.
|
||||
CachedPluginPath(kind string, name string, version Version) string
|
||||
|
||||
// InstallDir returns the directory that new plugins should be installed into
|
||||
// in order to populate the cache. This directory should be used as the
|
||||
// first argument to getter.Get when downloading plugins with go-getter.
|
||||
//
|
||||
// After installing into this directory, use CachedPluginPath to obtain the
|
||||
// path where the plugin was installed.
|
||||
InstallDir() string
|
||||
}
|
||||
|
||||
// NewLocalPluginCache returns a PluginCache that caches plugins in a
|
||||
// given local directory.
|
||||
func NewLocalPluginCache(dir string) PluginCache {
|
||||
return &pluginCache{
|
||||
Dir: dir,
|
||||
}
|
||||
}
|
||||
|
||||
type pluginCache struct {
|
||||
Dir string
|
||||
}
|
||||
|
||||
func (c *pluginCache) CachedPluginPath(kind string, name string, version Version) string {
|
||||
allPlugins := FindPlugins(kind, []string{c.Dir})
|
||||
plugins := allPlugins.WithName(name).WithVersion(version)
|
||||
|
||||
if plugins.Count() == 0 {
|
||||
// nothing cached
|
||||
return ""
|
||||
}
|
||||
|
||||
// There should generally be only one plugin here; if there's more than
|
||||
// one match for some reason then we'll just choose one arbitrarily.
|
||||
plugin := plugins.Newest()
|
||||
return plugin.Path
|
||||
}
|
||||
|
||||
func (c *pluginCache) InstallDir() string {
|
||||
return c.Dir
|
||||
}
|
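A sketch of how a caller might wire a shared cache into `ProviderInstaller`; the helper name, install directory handling, and use of `TF_PLUGIN_CACHE_DIR` here are illustrative rather than taken from this changeset:

```go
package discovery

import "os"

// newCachingInstaller is a sketch, not part of this package: it shows one way
// to enable the optional plugin cache when constructing the installer.
func newCachingInstaller(installDir string) *ProviderInstaller {
	i := &ProviderInstaller{Dir: installDir}

	// Cache is nil (disabled) unless a shared cache directory is configured.
	if cacheDir := os.Getenv("TF_PLUGIN_CACHE_DIR"); cacheDir != "" {
		i.Cache = NewLocalPluginCache(cacheDir)
	}
	return i
}
```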
|
@ -0,0 +1,29 @@
|
|||
package discovery
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLocalPluginCache(t *testing.T) {
|
||||
cache := NewLocalPluginCache("test-fixtures/plugin-cache")
|
||||
|
||||
foo1Path := cache.CachedPluginPath("provider", "foo", VersionStr("v0.0.1").MustParse())
|
||||
if foo1Path == "" {
|
||||
t.Errorf("foo v0.0.1 not found; should have been found")
|
||||
}
|
||||
|
||||
foo2Path := cache.CachedPluginPath("provider", "foo", VersionStr("v0.0.2").MustParse())
|
||||
if foo2Path != "" {
|
||||
t.Errorf("foo v0.0.2 found at %s; should not have been found", foo2Path)
|
||||
}
|
||||
|
||||
baz1Path := cache.CachedPluginPath("provider", "baz", VersionStr("v0.0.1").MustParse())
|
||||
if baz1Path != "" {
|
||||
t.Errorf("baz v0.0.1 found at %s; should not have been found", baz1Path)
|
||||
}
|
||||
|
||||
baz2Path := cache.CachedPluginPath("provider", "baz", VersionStr("v0.0.2").MustParse())
|
||||
if baz2Path != "" {
|
||||
t.Errorf("baz v0.0.2 found at %s; should not have been found", baz2Path)
|
||||
}
|
||||
}
|
|
@ -8809,3 +8809,127 @@ module.child:
|
|||
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestContext2Apply_destroyWithLocals(t *testing.T) {
|
||||
m := testModule(t, "apply-destroy-with-locals")
|
||||
p := testProvider("aws")
|
||||
p.ApplyFn = testApplyFn
|
||||
p.DiffFn = func(info *InstanceInfo, s *InstanceState, c *ResourceConfig) (*InstanceDiff, error) {
|
||||
d, err := testDiffFn(info, s, c)
|
||||
fmt.Println("DIFF:", d)
|
||||
return d, err
|
||||
}
|
||||
|
||||
s := &State{
|
||||
Modules: []*ModuleState{
|
||||
&ModuleState{
|
||||
Path: rootModulePath,
|
||||
Outputs: map[string]*OutputState{
|
||||
"name": &OutputState{
|
||||
Type: "string",
|
||||
Value: "test-bar",
|
||||
},
|
||||
},
|
||||
Resources: map[string]*ResourceState{
|
||||
"aws_instance.foo": &ResourceState{
|
||||
Type: "aws_instance",
|
||||
Primary: &InstanceState{
|
||||
ID: "foo",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ctx := testContext2(t, &ContextOpts{
|
||||
Module: m,
|
||||
ProviderResolver: ResourceProviderResolverFixed(
|
||||
map[string]ResourceProviderFactory{
|
||||
"aws": testProviderFuncFixed(p),
|
||||
},
|
||||
),
|
||||
State: s,
|
||||
Destroy: true,
|
||||
})
|
||||
|
||||
if _, err := ctx.Plan(); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
state, err := ctx.Apply()
|
||||
if err != nil {
|
||||
t.Fatalf("error during apply: %s", err)
|
||||
}
|
||||
|
||||
got := strings.TrimSpace(state.String())
|
||||
want := strings.TrimSpace(`<no state>`)
|
||||
if got != want {
|
||||
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestContext2Apply_providerWithLocals(t *testing.T) {
|
||||
m := testModule(t, "provider-with-locals")
|
||||
p := testProvider("aws")
|
||||
|
||||
providerRegion := ""
|
||||
// this should not be overridden during destroy
|
||||
p.ConfigureFn = func(c *ResourceConfig) error {
|
||||
if r, ok := c.Get("region"); ok {
|
||||
providerRegion = r.(string)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
p.DiffFn = testDiffFn
|
||||
p.ApplyFn = testApplyFn
|
||||
ctx := testContext2(t, &ContextOpts{
|
||||
Module: m,
|
||||
ProviderResolver: ResourceProviderResolverFixed(
|
||||
map[string]ResourceProviderFactory{
|
||||
"aws": testProviderFuncFixed(p),
|
||||
},
|
||||
),
|
||||
})
|
||||
|
||||
if _, err := ctx.Plan(); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
state, err := ctx.Apply()
|
||||
if err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
ctx = testContext2(t, &ContextOpts{
|
||||
Module: m,
|
||||
ProviderResolver: ResourceProviderResolverFixed(
|
||||
map[string]ResourceProviderFactory{
|
||||
"aws": testProviderFuncFixed(p),
|
||||
},
|
||||
),
|
||||
State: state,
|
||||
Destroy: true,
|
||||
})
|
||||
|
||||
if _, err = ctx.Plan(); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
state, err = ctx.Apply()
|
||||
if err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
|
||||
if state.HasResources() {
|
||||
t.Fatal("expected no state, got:", state)
|
||||
}
|
||||
|
||||
// Destroy won't work because the local value is removed before the
|
||||
// provider. Once this is fixed this test will start to fail, and we
|
||||
// can remove the invalid interpolation string;
|
||||
// if providerRegion != "bar" {
|
||||
if providerRegion != "${local.foo}" {
|
||||
t.Fatalf("expected region %q, got: %q", "bar", providerRegion)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -56,3 +56,31 @@ func (n *EvalLocal) Eval(ctx EvalContext) (interface{}, error) {
|
|||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// EvalDeleteLocal is an EvalNode implementation that deletes a Local value
|
||||
// from the state. Locals aren't persisted, but we don't need to evaluate them
|
||||
// during destroy.
|
||||
type EvalDeleteLocal struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func (n *EvalDeleteLocal) Eval(ctx EvalContext) (interface{}, error) {
|
||||
state, lock := ctx.State()
|
||||
if state == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Get a write lock so we can access this instance
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
|
||||
// Look for the module state. If we don't have one, create it.
|
||||
mod := state.ModuleByPath(ctx.Path())
|
||||
if mod == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
delete(mod.Locals, n.Name)
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
|
|
@ -59,20 +59,36 @@ func (n *NodeLocal) References() []string {
|
|||
|
||||
// GraphNodeEvalable
|
||||
func (n *NodeLocal) EvalTree() EvalNode {
|
||||
return &EvalOpFilter{
|
||||
Ops: []walkOperation{
|
||||
walkInput,
|
||||
walkValidate,
|
||||
walkRefresh,
|
||||
walkPlan,
|
||||
walkApply,
|
||||
walkDestroy,
|
||||
},
|
||||
Node: &EvalSequence{
|
||||
Nodes: []EvalNode{
|
||||
&EvalLocal{
|
||||
Name: n.Config.Name,
|
||||
Value: n.Config.RawConfig,
|
||||
return &EvalSequence{
|
||||
Nodes: []EvalNode{
|
||||
&EvalOpFilter{
|
||||
Ops: []walkOperation{
|
||||
walkInput,
|
||||
walkValidate,
|
||||
walkRefresh,
|
||||
walkPlan,
|
||||
walkApply,
|
||||
},
|
||||
Node: &EvalSequence{
|
||||
Nodes: []EvalNode{
|
||||
&EvalLocal{
|
||||
Name: n.Config.Name,
|
||||
Value: n.Config.RawConfig,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
&EvalOpFilter{
|
||||
Ops: []walkOperation{
|
||||
walkPlanDestroy,
|
||||
walkDestroy,
|
||||
},
|
||||
Node: &EvalSequence{
|
||||
Nodes: []EvalNode{
|
||||
&EvalDeleteLocal{
|
||||
Name: n.Config.Name,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
locals {
|
||||
name = "test-${aws_instance.foo.id}"
|
||||
}
|
||||
resource "aws_instance" "foo" {}
|
||||
|
||||
output "name" {
|
||||
value = "${local.name}"
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
provider "aws" {
|
||||
region = "${local.foo}"
|
||||
}
|
||||
|
||||
locals {
|
||||
foo = "bar"
|
||||
}
|
||||
|
||||
resource "aws_instance" "foo" {
|
||||
value = "${local.foo}"
|
||||
}
|
|
@ -7,7 +7,7 @@ import (
|
|||
)
|
||||
|
||||
// The main version number that is being run at the moment.
|
||||
const Version = "0.10.6"
|
||||
const Version = "0.10.7"
|
||||
|
||||
// A pre-release marker for the version. If this is "" (empty string)
|
||||
// then it means that it is a final release. Otherwise, this is a pre-release
|
||||
|
|
|
@ -49,6 +49,11 @@ func SubdirGlob(dst, subDir string) (string, error) {
|
|||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(matches) == 0 {
|
||||
return "", fmt.Errorf("subdir %q not found", subDir)
|
||||
}
|
||||
|
||||
if len(matches) > 1 {
|
||||
return "", fmt.Errorf("subdir %q matches multiple paths", subDir)
|
||||
}
|
||||
|
|
|
@ -59,8 +59,20 @@ type CLI struct {
|
|||
// For example, if the key is "foo bar", then to access it our CLI
|
||||
// must be accessed with "./cli foo bar". See the docs for CLI for
|
||||
// notes on how this changes some other behavior of the CLI as well.
|
||||
//
|
||||
// The factory should be as cheap as possible, ideally only allocating
|
||||
// a struct. The factory may be called multiple times in the course
|
||||
// of a command execution and certain events such as help require the
|
||||
// instantiation of all commands. Expensive initialization should be
|
||||
// deferred to function calls within the interface implementation.
|
||||
Commands map[string]CommandFactory
|
||||
|
||||
// HiddenCommands is a list of commands that are "hidden". Hidden
|
||||
// commands are not given to the help function callback and do not
|
||||
// show up in autocomplete. The values in the slice should be equivalent
|
||||
// to the keys in the command map.
|
||||
HiddenCommands []string
|
||||
|
||||
// Name defines the name of the CLI.
|
||||
Name string
|
||||
|
||||
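A sketch of how `HiddenCommands` is intended to be used when constructing a `cli.CLI`; the command being hidden is a placeholder. Hidden commands remain runnable but are omitted from help output and autocompletion:

```go
package main

import (
	"os"

	"github.com/mitchellh/cli"
)

// newCLI builds a CLI that hides an internal command from help and completion.
func newCLI(commands map[string]cli.CommandFactory) *cli.CLI {
	return &cli.CLI{
		Name:           "terraform",
		Args:           os.Args[1:],
		Commands:       commands,
		HiddenCommands: []string{"internal-plugin"},
		HelpWriter:     os.Stdout,
	}
}
```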
|
@ -116,6 +128,7 @@ type CLI struct {
|
|||
autocomplete *complete.Complete
|
||||
commandTree *radix.Tree
|
||||
commandNested bool
|
||||
commandHidden map[string]struct{}
|
||||
subcommand string
|
||||
subcommandArgs []string
|
||||
topFlags []string
|
||||
|
@ -173,7 +186,7 @@ func (c *CLI) Run() (int, error) {
|
|||
|
||||
// Just print the help when only '-h' or '--help' is passed.
|
||||
if c.IsHelp() && c.Subcommand() == "" {
|
||||
c.HelpWriter.Write([]byte(c.HelpFunc(c.Commands) + "\n"))
|
||||
c.HelpWriter.Write([]byte(c.HelpFunc(c.helpCommands(c.Subcommand())) + "\n"))
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
|
@ -216,7 +229,7 @@ func (c *CLI) Run() (int, error) {
|
|||
raw, ok := c.commandTree.Get(c.Subcommand())
|
||||
if !ok {
|
||||
c.HelpWriter.Write([]byte(c.HelpFunc(c.helpCommands(c.subcommandParent())) + "\n"))
|
||||
return 1, nil
|
||||
return 127, nil
|
||||
}
|
||||
|
||||
command, err := raw.(CommandFactory)()
|
||||
|
@ -298,6 +311,14 @@ func (c *CLI) init() {
|
|||
c.HelpWriter = os.Stderr
|
||||
}
|
||||
|
||||
// Build our hidden commands
|
||||
if len(c.HiddenCommands) > 0 {
|
||||
c.commandHidden = make(map[string]struct{})
|
||||
for _, h := range c.HiddenCommands {
|
||||
c.commandHidden[h] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
// Build our command tree
|
||||
c.commandTree = radix.New()
|
||||
c.commandNested = false
|
||||
|
@ -398,32 +419,32 @@ func (c *CLI) initAutocomplete() {
|
|||
func (c *CLI) initAutocompleteSub(prefix string) complete.Command {
|
||||
var cmd complete.Command
|
||||
walkFn := func(k string, raw interface{}) bool {
|
||||
// Keep track of the full key so that we can nest further if necessary
|
||||
fullKey := k
|
||||
|
||||
if len(prefix) > 0 {
|
||||
// If we have a prefix, trim the prefix + 1 (for the space)
|
||||
// Example: turns "sub one" to "one" with prefix "sub"
|
||||
k = k[len(prefix)+1:]
|
||||
}
|
||||
|
||||
// Keep track of the full key so that we can nest further if necessary
|
||||
fullKey := k
|
||||
|
||||
if idx := strings.LastIndex(k, " "); idx >= 0 {
|
||||
// If there is a space, we trim up to the space
|
||||
if idx := strings.Index(k, " "); idx >= 0 {
|
||||
// If there is a space, we trim up to the space. This turns
|
||||
// "sub sub2 sub3" into "sub". The prefix trim above will
|
||||
// trim our current depth properly.
|
||||
k = k[:idx]
|
||||
}
|
||||
|
||||
if idx := strings.LastIndex(k, " "); idx >= 0 {
|
||||
// This catches the scenario just in case where we see "sub one"
|
||||
// before "sub". This will let us properly setup the subcommand
|
||||
// regardless.
|
||||
k = k[idx+1:]
|
||||
}
|
||||
|
||||
if _, ok := cmd.Sub[k]; ok {
|
||||
// If we already tracked this subcommand then ignore
|
||||
return false
|
||||
}
|
||||
|
||||
// If the command is hidden, don't record it at all
|
||||
if _, ok := c.commandHidden[fullKey]; ok {
|
||||
return false
|
||||
}
|
||||
|
||||
if cmd.Sub == nil {
|
||||
cmd.Sub = complete.Commands(make(map[string]complete.Command))
|
||||
}
|
||||
|
@ -571,6 +592,11 @@ func (c *CLI) helpCommands(prefix string) map[string]CommandFactory {
|
|||
panic("not found: " + k)
|
||||
}
|
||||
|
||||
// If this is a hidden command, don't show it
|
||||
if _, ok := c.commandHidden[k]; ok {
|
||||
continue
|
||||
}
|
||||
|
||||
result[k] = raw.(CommandFactory)
|
||||
}
|
||||
|
||||
|
|
|
@ -1366,10 +1366,10 @@
|
|||
"revisionTime": "2017-02-11T01:34:15Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "7SY5eTKPGF0BjyByXfKhZAAqnKc=",
|
||||
"checksumSHA1": "9VcI9QGCShWIUIL187qRd4sxwb8=",
|
||||
"path": "github.com/hashicorp/go-getter",
|
||||
"revision": "56c651a79a6eec93e6ef074fe9e57fefb26b8b85",
|
||||
"revisionTime": "2017-09-14T15:44:44Z",
|
||||
"revision": "a686900cb3753aa644dc4812be91ceaf9fdd3b98",
|
||||
"revisionTime": "2017-09-22T19:29:48Z",
|
||||
"version": "master",
|
||||
"versionExact": "master"
|
||||
},
|
||||
|
@ -1723,10 +1723,10 @@
|
|||
"revisionTime": "2017-01-23T01:43:24Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "KXrCoifaKi3Wy4zbCfXTtM/FO48=",
|
||||
"checksumSHA1": "UIqCj7qI0hhIMpAhS9YYqs2jD48=",
|
||||
"path": "github.com/mitchellh/cli",
|
||||
"revision": "b633c78680fa6fb27ac81694f38c28f79602ebd9",
|
||||
"revisionTime": "2017-08-14T15:07:37Z"
|
||||
"revision": "65fcae5817c8600da98ada9d7edf26dd1a84837b",
|
||||
"revisionTime": "2017-09-08T18:10:43Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "ttEN1Aupb7xpPMkQLqb3tzLFdXs=",
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
---
|
||||
layout: "docs"
|
||||
page_title: "CLI Configuration"
|
||||
sidebar_current: "docs-commands-cli-config"
|
||||
description: |-
|
||||
The general behavior of the Terraform CLI can be customized using the CLI
|
||||
configuration file.
|
||||
---
|
||||
|
||||
# CLI Configuration File
|
||||
|
||||
The CLI configuration file allows customization of some behaviors of the
|
||||
Terraform CLI in general. This is separate from
|
||||
[your infrastructure configuration](/docs/configuration/index.html), and
|
||||
provides per-user customization that applies regardless of which working
|
||||
directory Terraform is being applied to.
|
||||
|
||||
For example, the CLI configuration file can be used to activate a shared
|
||||
plugin cache directory that allows provider plugins to be shared between
|
||||
different working directories, as described in more detail below.
|
||||
|
||||
The configuration is placed in a single file whose location depends on the
|
||||
host operating system:
|
||||
|
||||
* On Windows, the file must be named `terraform.rc` and placed
|
||||
in the relevant user's "Application Data" directory. The physical location
|
||||
of this directory depends on your Windows version and system configuration;
|
||||
use `$env:APPDATA` in PowerShell to find its location on your system.
|
||||
* On all other systems, the file must be named `.terraformrc` (note
|
||||
the leading period) and placed directly in the home directory
|
||||
of the relevant user.
|
||||
|
||||
On Windows, beware of Windows Explorer's default behavior of hiding filename
|
||||
extensions. Terraform will not recognize a file named `terraform.rc.txt` as a
|
||||
CLI configuration file, even though Windows Explorer may _display_ its name
|
||||
as just `terraform.rc`. Use `dir` from PowerShell or Command Prompt to
|
||||
confirm the filename.
|
||||
|
||||
## Configuration File Syntax
|
||||
|
||||
The configuration file uses the same _HCL_ syntax as `.tf` files, but with
|
||||
different attributes and blocks. The following example illustrates the
|
||||
general syntax; see the following section for information on the meaning
|
||||
of each of these settings:
|
||||
|
||||
```hcl
|
||||
plugin_cache_dir = "$HOME/.terraform.d/plugin-cache"
|
||||
disable_checkpoint = true
|
||||
```
|
||||
|
||||
## Available Settings
|
||||
|
||||
The following settings can be set in the CLI configuration file:
|
||||
|
||||
* `disable_checkpoint` - when set to `true`, disables
|
||||
[upgrade and security bulletin checks](/docs/commands/index.html#upgrade-and-security-bulletin-checks)
|
||||
that require reaching out to HashiCorp-provided network services.
|
||||
|
||||
* `disable_checkpoint_signature` - when set to `true`, allows the upgrade and
|
||||
security bulletin checks described above but disables the use of an anonymous
|
||||
id used to de-duplicate warning messages.
|
||||
|
||||
* `plugin_cache_dir` - enables
|
||||
[plugin caching](/docs/configuration/providers.html#provider-plugin-cache)
|
||||
and specifies, as a string, the location of the plugin cache directory.
|
||||
|
||||
## Deprecated Settings
|
||||
|
||||
The following settings are supported for backward compatibility but are no
|
||||
longer recommended for use:
|
||||
|
||||
* `providers` - a configuration block that allows specifying the locations of
|
||||
specific plugins for each named provider. This mechanism is deprecated
|
||||
because it is unable to specify a version number for each plugin, and thus
|
||||
it does not co-operate with the plugin versioning mechanism. Instead,
|
||||
place the plugin executable files in
|
||||
[the third-party plugins directory](/docs/configuration/providers.html#third-party-plugins).
|
|
@ -73,6 +73,31 @@ Usage: terraform graph [options] PATH
|
|||
to read this format.
|
||||
```
|
||||
|
||||
## Shell Tab-completion
|
||||
|
||||
If you use either `bash` or `zsh` as your command shell, Terraform can provide
|
||||
tab-completion support for all command names and (at this time) _some_ command
|
||||
arguments.
|
||||
|
||||
To add the necessary commands to your shell profile, run the following command:
|
||||
|
||||
```bash
|
||||
terraform -install-autocomplete
|
||||
```
|
||||
|
||||
After installation, it is necessary to restart your shell or to re-read its
|
||||
profile script before completion will be activated.
|
||||
|
||||
To uninstall the completion hook, assuming that it has not been modified
|
||||
manually in the shell profile, run the following command:
|
||||
|
||||
```bash
|
||||
terraform -uninstall-autocomplete
|
||||
```
|
||||
|
||||
Currently not all of Terraform's subcommands have full tab-completion support
|
||||
for all arguments. We plan to improve tab-completion coverage over time.
|
||||
|
||||
## Upgrade and Security Bulletin Checks
|
||||
|
||||
The Terraform CLI commands interact with the HashiCorp service
|
||||
|
@ -91,13 +116,10 @@ optional and can be disabled.
|
|||
Checkpoint itself can be entirely disabled for all HashiCorp products by
|
||||
setting the environment variable `CHECKPOINT_DISABLE` to any non-empty value.
|
||||
|
||||
Alternatively, settings in Terraform's global configuration file can be used
|
||||
to disable checkpoint features. On Unix systems this file is named
|
||||
`.terraformrc` and is placed within the home directory of the user running
|
||||
Terraform. On Windows, this file is named `terraform.rc` and is and is placed
|
||||
in the current user's _Application Data_ folder.
|
||||
|
||||
The following checkpoint-related settings are supported in this file:
|
||||
Alternatively, settings in
|
||||
[the CLI configuration file](/docs/commands/cli-config.html) can be used to
|
||||
disable checkpoint features. The following checkpoint-related settings are
|
||||
supported in this file:
|
||||
|
||||
* `disable_checkpoint` - set to `true` to disable checkpoint calls
|
||||
entirely. This is similar to the `CHECKPOINT_DISABLE` environment variable
|
||||
|
|
|
@ -145,6 +145,10 @@ syntax `name(arg, arg2, ...)`. For example, to read a file:
|
|||
|
||||
The supported built-in functions are:
|
||||
|
||||
* `abs(float)` - Returns the absolute value of a given float.
|
||||
Example: `abs(1)` returns `1`, and `abs(-1)` would also return `1`,
|
||||
whereas `abs(-3.14)` would return `3.14`. See also the `signum` function.
|
||||
|
||||
* `basename(path)` - Returns the last element of a path.
|
||||
|
||||
* `base64decode(string)` - Given a base64-encoded string, decodes it and
|
||||
|
@ -358,7 +362,7 @@ The supported built-in functions are:
|
|||
SHA-512 hash of the given string.
|
||||
Example: `"${sha512("${aws_vpc.default.tags.customer}-s3-bucket")}"`
|
||||
|
||||
* `signum(int)` - Returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.
|
||||
* `signum(integer)` - Returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers.
|
||||
This function is useful when you need to set a value for the first resource and
|
||||
a different value for the rest of the resources.
|
||||
Example: `element(split(",", var.r53_failover_policy), signum(count.index))`
|
||||
|
@ -388,6 +392,8 @@ The supported built-in functions are:
|
|||
|
||||
* `title(string)` - Returns a copy of the string with the first characters of all the words capitalized.
|
||||
|
||||
* `transpose(map)` - Swaps the keys and list values in a map of lists of strings. For example, `transpose(map("a", list("1", "2"), "b", list("2", "3")))` produces a value equivalent to `map("1", list("a"), "2", list("a", "b"), "3", list("b"))`.
|
||||
|
||||
* `trimspace(string)` - Returns a copy of the string with all leading and trailing white spaces removed.
|
||||
|
||||
* `upper(string)` - Returns a copy of the string with all Unicode letters mapped to their upper case.
|
||||
|
|
|
@ -185,4 +185,93 @@ provider "aws" {
|
|||
}
|
||||
```
|
||||
|
||||
-> **NOTE:** Because providers are one of the first things loaded when Terraform parses the graph, it is not possible to use the output from modules or resources as inputs to the provider. At this time, only [variables](/docs/configuration/variables.html) and [data sources](/docs/configuration/data-sources.html), including [remote state](/docs/providers/terraform/d/remote_state.html) may be used in an interpolation inside a provider stanza.
|
||||
An exception to this is the special `version` attribute that applies to all `provider` blocks for specifying [provider versions](#provider-versions); interpolation is not supported for provider versions since provider compatibility is a property of the configuration rather than something dynamic, and provider plugin installation happens too early for variables to be resolvable in this context.
|
||||
|
||||
-> **NOTE:** Because providers are one of the first things loaded when Terraform parses the graph, it is not possible to
|
||||
use the output from modules or resources as inputs to the provider. At this time, only
|
||||
[variables](/docs/configuration/variables.html) and [data sources](/docs/configuration/data-sources.html), including
|
||||
[remote state](/docs/providers/terraform/d/remote_state.html) may be used in an interpolation inside a provider stanza.
|
||||
[Local values](/docs/configuration/locals.html) can also be used, but currently may fail when running `terraform destroy`.
|
||||
|
||||
|
||||
## Third-party Plugins
|
||||
|
||||
At present Terraform can automatically install only the providers distributed
|
||||
by HashiCorp. Third-party providers can be manually installed by placing
|
||||
their plugin executables in one of the following locations depending on the
|
||||
host operating system:
|
||||
|
||||
* On Windows, in the sub-path `terraform.d/plugins` beneath your user's
|
||||
"Application Data" directory.
|
||||
* On all other systems, in the sub-path `.terraform.d/plugins` in your
|
||||
user's home directory.
|
||||
|
||||
`terraform init` will search this directory for additional plugins during
|
||||
plugin initialization.
|
||||
|
||||
The naming scheme for provider plugins is `terraform-provider-NAME-vX.Y.Z`,
|
||||
and Terraform uses the name to understand the name and version of a particular
|
||||
provider binary. Third-party plugins will often be distributed with an
|
||||
appropriate filename already set in the distribution archive so that it can
|
||||
be extracted directly into the plugin directory described above.
|
||||
|
||||
## Provider Plugin Cache
|
||||
|
||||
By default, `terraform init` downloads plugins into a subdirectory of the
|
||||
working directory so that each working directory is self-contained. As a
|
||||
consequence, if you have multiple configurations that use the same provider
|
||||
then a separate copy of its plugin will be downloaded for each configuration.
|
||||
|
||||
Given that provider plugins can be quite large (on the order of hundreds of
|
||||
megabytes), this default behavior can be inconvenient for those with slow
|
||||
or metered Internet connections. Therefore Terraform optionally allows the
|
||||
use of a local directory as a shared plugin cache, which then allows each
|
||||
distinct plugin binary to be downloaded only once.
|
||||
|
||||
To enable the plugin cache, use the `plugin_cache_dir` setting in
|
||||
[the CLI configuration file](https://www.terraform.io/docs/commands/cli-config.html).
|
||||
For example:
|
||||
|
||||
```hcl
|
||||
# (Note that the CLI configuration file is _not_ the same as the .tf files
|
||||
# used to configure infrastructure.)
|
||||
|
||||
plugin_cache_dir = "$HOME/.terraform.d/plugin-cache"
|
||||
```
|
||||
|
||||
Please note that on Windows it is necessary to use forward slash separators
|
||||
(`/`) rather than the conventional backslash (`\`) since the configuration
|
||||
file parser considers a backslash to begin an escape sequence.
|
||||
|
||||
Setting this in the configuration file is the recommended approach for a
|
||||
persistent setting. Alternatively, the `TF_PLUGIN_CACHE_DIR` environment
|
||||
variable can be used to enable caching or to override an existing cache
|
||||
directory within a particular shell session:
|
||||
|
||||
```bash
|
||||
export TF_PLUGIN_CACHE_DIR="~/.terraform.d/plugin-cache"
|
||||
```
|
||||
|
||||
When a plugin cache directory is enabled, the `terraform init` command will
|
||||
still access the plugin distribution server to obtain metadata about which
|
||||
plugins are available, but once a suitable version has been selected it will
|
||||
first check to see if the selected plugin is already available in the cache
|
||||
directory. If so, the already-downloaded plugin binary will be used.
|
||||
|
||||
If the selected plugin is not already in the cache, it will be downloaded
|
||||
into the cache first and then copied from there into the correct location
|
||||
under your current working directory.
|
||||
|
||||
When possible, Terraform will use hardlinks or symlinks to avoid storing
|
||||
a separate copy of a cached plugin in multiple directories. At present, this
|
||||
is not supported on Windows and instead a copy is always created.
|
||||
|
||||
The plugin cache directory must *not* be the third-party plugin directory
|
||||
or any other directory Terraform searches for pre-installed plugins, since
|
||||
the cache management logic conflicts with the normal plugin discovery logic
|
||||
when operating on the same directory.
|
||||
|
||||
Please note that Terraform will never itself delete a plugin from the
|
||||
plugin cache once it's been placed there. Over time, as plugins are upgraded,
|
||||
the cache directory may grow to contain several unused versions which must be
|
||||
manually deleted.
|
||||
|
|
|
@ -27,7 +27,7 @@ such as bash. Plugins are executed as a separate process and communicate with
|
|||
the main Terraform binary over an RPC interface.
|
||||
|
||||
More details are available in
|
||||
[Internal Docs](/docs/internals/internal-plugins.html).
|
||||
_[Plugin Internals](/docs/internals/internal-plugins.html)_.
|
||||
|
||||
The code within the binaries must adhere to certain interfaces.
|
||||
The network communication and RPC is handled automatically by higher-level
|
||||
|
@ -36,24 +36,13 @@ in its respective documentation section.
|
|||
|
||||
## Installing a Plugin
|
||||
|
||||
To install a plugin, put the binary somewhere on your filesystem, then
|
||||
configure Terraform to be able to find it. The configuration where plugins
|
||||
are defined is `~/.terraformrc` for Unix-like systems and
|
||||
`%APPDATA%/terraform.rc` for Windows.
|
||||
To install a plugin distributed by a third party developer, place the binary
|
||||
(extracted from any containing zip file) in
|
||||
[the third-party plugins directory](/docs/configuration/providers.html#third-party-plugins).
|
||||
|
||||
An example that configures a new provider is shown below:
|
||||
|
||||
```hcl
|
||||
providers {
|
||||
privatecloud = "/path/to/privatecloud"
|
||||
}
|
||||
```
|
||||
|
||||
The key `privatecloud` is the _prefix_ of the resources for that provider.
|
||||
For example, if there is `privatecloud_instance` resource, then the above
|
||||
configuration would work. The value is the name of the executable. This
|
||||
can be a full path. If it isn't a full path, the executable will be looked
|
||||
up on the `PATH`.
|
||||
Provider plugin binaries are named with the prefix `terraform-provider-`,
|
||||
while provisioner plugins have the prefix `terraform-provisioner-`. Both
|
||||
are placed in the same directory.
|
||||
|
||||
## Developing a Plugin
|
||||
|
||||
|
@ -73,7 +62,12 @@ is your GitHub username and `NAME` is the name of the plugin you're
|
|||
developing. This structure is what Go expects and simplifies things down
|
||||
the road.
|
||||
|
||||
With the directory made, create a `main.go` file. This project will
|
||||
The `NAME` should either begin with `provider-` or `provisioner-`,
|
||||
depending on what kind of plugin it will be. The repository name will,
|
||||
by default, be the name of the binary produced by `go install` for
|
||||
your plugin package.
|
||||
|
||||
With the package directory made, create a `main.go` file. This project will
|
||||
be a binary so the package is "main":
|
||||
|
||||
```golang
|
||||
|
@ -88,13 +82,13 @@ func main() {
|
|||
}
|
||||
```
|
||||
|
||||
And that's basically it! You'll have to change the argument given to
|
||||
`plugin.Serve` to be your actual plugin, but that is the only change
|
||||
you'll have to make. The argument should be a structure implementing
|
||||
one of the plugin interfaces (depending on what sort of plugin
|
||||
you're creating).
|
||||
The name `MyPlugin` is a placeholder for the struct type that represents
|
||||
your plugin's implementation. This must implement either
|
||||
`terraform.ResourceProvider` or `terraform.ResourceProvisioner`, depending
|
||||
on the plugin type.
|
||||
|
||||
Terraform plugins must follow a very specific naming convention of
|
||||
`terraform-TYPE-NAME`. For example, `terraform-provider-aws`, which
|
||||
tells Terraform that the plugin is a provider that can be referenced
|
||||
as "aws".
|
||||
To test your plugin, the easiest method is to copy your `terraform` binary
|
||||
to `$GOPATH/bin` and ensure that this copy is the one being used for testing.
|
||||
`terraform init` will search for plugins within the same directory as the
|
||||
`terraform` binary, and `$GOPATH/bin` is the directory into which `go install`
|
||||
will place the plugin executable.
|
||||
|
|
|
@ -25,7 +25,7 @@ module "consul" {
|
|||
}
|
||||
```
|
||||
|
||||
~> **Note:** Module registry integration was added in Terraform 10.6
|
||||
~> **Note:** Module registry integration was added in Terraform v0.10.6
|
||||
|
||||
You can also publish your own modules on the Terraform Registry. You may
|
||||
use the [public registry](https://registry.terraform.io) for public modules.
|
||||
|
|
|
@ -148,6 +148,10 @@
|
|||
<li<%= sidebar_current("docs-commands-workspace") %>>
|
||||
<a href="/docs/commands/workspace/index.html">workspace</a>
|
||||
</li>
|
||||
|
||||
<li<%= sidebar_current("docs-commands-cli-config") %>>
|
||||
<a href="/docs/commands/cli-config.html">CLI Config File</a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
|
||||
|
@ -489,12 +493,16 @@
|
|||
<a href="/docs/provisioners/local-exec.html">local-exec</a>
|
||||
</li>
|
||||
|
||||
<li<%= sidebar_current("docs-provisioners-null-resource") %>>
|
||||
<a href="/docs/provisioners/null_resource.html">null_resource</a>
|
||||
</li>
|
||||
|
||||
<li<%= sidebar_current("docs-provisioners-remote") %>>
|
||||
<a href="/docs/provisioners/remote-exec.html">remote-exec</a>
|
||||
</li>
|
||||
|
||||
<li<%= sidebar_current("docs-provisioners-null-resource") %>>
|
||||
<a href="/docs/provisioners/null_resource.html">null_resource</a>
|
||||
<li<%= sidebar_current("docs-provisioners-salt-masterless") %>>
|
||||
<a href="/docs/provisioners/salt-masterless.html">salt-masterless</a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
|
|