Merge remote-tracking branch 'origin/master' into statuscake_adding_contact_group
This commit is contained in:
commit
3ff92224d2
|
@ -373,7 +373,7 @@ to a single resource. Most tests follow a similar structure.
|
|||
|
||||
1. Pre-flight checks are made to ensure that sufficient provider configuration
|
||||
is available to be able to proceed - for example in an acceptance test
|
||||
targetting AWS, `AWS_ACCESS_KEY_ID` and `AWS_SECRET_KEY` must be set prior
|
||||
targetting AWS, `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` must be set prior
|
||||
to running acceptance tests. This is common to all tests exercising a single
|
||||
provider.
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
sudo: false
|
||||
language: go
|
||||
go:
|
||||
- 1.6
|
||||
- 1.7.3
|
||||
install:
|
||||
# This script is used by the Travis build to install a cookie for
|
||||
# go.googlesource.com so rate limits are higher when using `go get` to fetch
|
||||
|
|
1249
CHANGELOG.md
1249
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
24
Makefile
24
Makefile
|
@ -1,4 +1,4 @@
|
|||
TEST?=$$(go list ./... | grep -v /vendor/)
|
||||
TEST?=$$(go list ./... | grep -v '/terraform/vendor/' | grep -v '/builtin/bins/')
|
||||
VETARGS?=-all
|
||||
GOFMT_FILES?=$$(find . -name '*.go' | grep -v vendor)
|
||||
|
||||
|
@ -29,16 +29,17 @@ core-dev: generate
|
|||
|
||||
# Shorthand for quickly testing the core of Terraform (i.e. "not providers")
|
||||
core-test: generate
|
||||
@echo "Testing core packages..." && go test -tags 'core' $(shell go list ./... | grep -v -E 'builtin|vendor')
|
||||
@echo "Testing core packages..." && \
|
||||
go test -tags 'core' $(TESTARGS) $(shell go list ./... | grep -v -E 'terraform/(builtin|vendor)')
|
||||
|
||||
# Shorthand for building and installing just one plugin for local testing.
|
||||
# Run as (for example): make plugin-dev PLUGIN=provider-aws
|
||||
plugin-dev: fmtcheck generate
|
||||
plugin-dev: generate
|
||||
go install github.com/hashicorp/terraform/builtin/bins/$(PLUGIN)
|
||||
mv $(GOPATH)/bin/$(PLUGIN) $(GOPATH)/bin/terraform-$(PLUGIN)
|
||||
|
||||
# test runs the unit tests
|
||||
test: fmtcheck generate
|
||||
test: fmtcheck errcheck generate
|
||||
TF_ACC= go test $(TEST) $(TESTARGS) -timeout=30s -parallel=4
|
||||
|
||||
# testacc runs acceptance tests
|
||||
|
@ -50,6 +51,14 @@ testacc: fmtcheck generate
|
|||
fi
|
||||
TF_ACC=1 go test $(TEST) -v $(TESTARGS) -timeout 120m
|
||||
|
||||
test-compile: fmtcheck generate
|
||||
@if [ "$(TEST)" = "./..." ]; then \
|
||||
echo "ERROR: Set TEST to a specific package. For example,"; \
|
||||
echo " make test-compile TEST=./builtin/providers/aws"; \
|
||||
exit 1; \
|
||||
fi
|
||||
go test -c $(TEST) $(TESTARGS)
|
||||
|
||||
# testrace runs the race checker
|
||||
testrace: fmtcheck generate
|
||||
TF_ACC= go test -race $(TEST) $(TESTARGS)
|
||||
|
@ -76,10 +85,10 @@ vet:
|
|||
# generate runs `go generate` to build the dynamically generated
|
||||
# source files.
|
||||
generate:
|
||||
@which stringer ; if [ $$? -ne 0 ]; then \
|
||||
@which stringer > /dev/null; if [ $$? -ne 0 ]; then \
|
||||
go get -u golang.org/x/tools/cmd/stringer; \
|
||||
fi
|
||||
go generate $$(go list ./... | grep -v /vendor/)
|
||||
go generate $$(go list ./... | grep -v /terraform/vendor/)
|
||||
@go fmt command/internal_plugin_list.go > /dev/null
|
||||
|
||||
fmt:
|
||||
|
@ -88,4 +97,7 @@ fmt:
|
|||
fmtcheck:
|
||||
@sh -c "'$(CURDIR)/scripts/gofmtcheck.sh'"
|
||||
|
||||
errcheck:
|
||||
@sh -c "'$(CURDIR)/scripts/errcheck.sh'"
|
||||
|
||||
.PHONY: bin default generate test vet fmt fmtcheck tools
|
||||
|
|
27
README.md
27
README.md
|
@ -2,7 +2,7 @@ Terraform
|
|||
=========
|
||||
|
||||
- Website: http://www.terraform.io
|
||||
- IRC: `#terraform-tool` on Freenode
|
||||
- [![Gitter chat](https://badges.gitter.im/hashicorp-terraform/Lobby.png)](https://gitter.im/hashicorp-terraform/Lobby)
|
||||
- Mailing list: [Google Groups](http://groups.google.com/group/terraform-tool)
|
||||
|
||||
![Terraform](https://raw.githubusercontent.com/hashicorp/terraform/master/website/source/assets/images/readme.png)
|
||||
|
@ -29,13 +29,14 @@ All documentation is available on the [Terraform website](http://www.terraform.i
|
|||
Developing Terraform
|
||||
--------------------
|
||||
|
||||
If you wish to work on Terraform itself or any of its built-in providers, you'll first need [Go](http://www.golang.org) installed on your machine (version 1.6+ is *required*). Alternatively, you can use the Vagrantfile in the root of this repo to stand up a virtual machine with the appropriate dev tooling already set up for you.
|
||||
If you wish to work on Terraform itself or any of its built-in providers, you'll first need [Go](http://www.golang.org) installed on your machine (version 1.7+ is *required*). Alternatively, you can use the Vagrantfile in the root of this repo to stand up a virtual machine with the appropriate dev tooling already set up for you.
|
||||
|
||||
For local dev first make sure Go is properly installed, including setting up a [GOPATH](http://golang.org/doc/code.html#GOPATH). You will also need to add `$GOPATH/bin` to your `$PATH`.
|
||||
|
||||
Next, using [Git](https://git-scm.com/), clone this repository into `$GOPATH/src/github.com/hashicorp/terraform`. All the necessary dependencies are either vendored or automatically installed, so you just need to type `make`. This will compile the code and then run the tests. If this exits with exit status 0, then everything is working!
|
||||
|
||||
```sh
|
||||
$ cd $GOPATH/src/github.com/hashicorp/terraform
|
||||
$ make
|
||||
```
|
||||
|
||||
|
@ -83,15 +84,15 @@ Assuming your work is on a branch called `my-feature-branch`, the steps look lik
|
|||
|
||||
1. Add the new package to your GOPATH:
|
||||
|
||||
```bash
|
||||
go get github.com/hashicorp/my-project
|
||||
```
|
||||
```bash
|
||||
go get github.com/hashicorp/my-project
|
||||
```
|
||||
|
||||
2. Add the new package to your vendor/ directory:
|
||||
|
||||
```bash
|
||||
govendor add github.com/hashicorp/my-project/package
|
||||
```
|
||||
```bash
|
||||
govendor add github.com/hashicorp/my-project/package
|
||||
```
|
||||
|
||||
3. Review the changes in git and commit them.
|
||||
|
||||
|
@ -101,9 +102,9 @@ To update a dependency:
|
|||
|
||||
1. Fetch the dependency:
|
||||
|
||||
```bash
|
||||
govendor fetch github.com/hashicorp/my-project
|
||||
```
|
||||
```bash
|
||||
govendor fetch github.com/hashicorp/my-project
|
||||
```
|
||||
|
||||
2. Review the changes in git and commit them.
|
||||
|
||||
|
@ -118,10 +119,10 @@ built-in providers. Our [Contributing Guide](https://github.com/hashicorp/terraf
|
|||
|
||||
If you wish to cross-compile Terraform for another architecture, you can set the `XC_OS` and `XC_ARCH` environment variables to values representing the target operating system and architecture before calling `make`. The output is placed in the `pkg` subdirectory tree both expanded in a directory representing the OS/architecture combination and as a ZIP archive.
|
||||
|
||||
For example, to compile 64-bit Linux binaries on Mac OS X Linux, you can run:
|
||||
For example, to compile 64-bit Linux binaries on Mac OS X, you can run:
|
||||
|
||||
```sh
|
||||
$ XC_OS=linux XC_ARCH=amd64 make bin
|
||||
$ XC_OS=linux XC_ARCH=amd64 make bin
|
||||
...
|
||||
$ file pkg/linux_amd64/terraform
|
||||
terraform: ELF 64-bit LSB executable, x86-64, version 1 (SYSV), statically linked, not stripped
|
||||
|
|
|
@ -5,50 +5,65 @@
|
|||
VAGRANTFILE_API_VERSION = "2"
|
||||
|
||||
$script = <<SCRIPT
|
||||
GOVERSION="1.6"
|
||||
GOVERSION="1.7.3"
|
||||
SRCROOT="/opt/go"
|
||||
SRCPATH="/opt/gopath"
|
||||
|
||||
# Get the ARCH
|
||||
ARCH=`uname -m | sed 's|i686|386|' | sed 's|x86_64|amd64|'`
|
||||
ARCH="$(uname -m | sed 's|i686|386|' | sed 's|x86_64|amd64|')"
|
||||
|
||||
# Install Prereq Packages
|
||||
sudo apt-get update
|
||||
sudo apt-get upgrade -y
|
||||
sudo apt-get install -y build-essential curl git-core libpcre3-dev mercurial pkg-config zip
|
||||
export DEBIAN_PRIORITY=critical
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
export DEBCONF_NONINTERACTIVE_SEEN=true
|
||||
APT_OPTS="--yes --force-yes --no-install-suggests --no-install-recommends"
|
||||
echo "Upgrading packages ..."
|
||||
apt-get update ${APT_OPTS} >/dev/null
|
||||
apt-get upgrade ${APT_OPTS} >/dev/null
|
||||
echo "Installing prerequisites ..."
|
||||
apt-get install ${APT_OPTS} build-essential curl git-core libpcre3-dev mercurial pkg-config zip >/dev/null
|
||||
|
||||
# Install Go
|
||||
cd /tmp
|
||||
wget --quiet https://storage.googleapis.com/golang/go${GOVERSION}.linux-${ARCH}.tar.gz
|
||||
tar -xvf go${GOVERSION}.linux-${ARCH}.tar.gz
|
||||
sudo mv go $SRCROOT
|
||||
sudo chmod 775 $SRCROOT
|
||||
sudo chown vagrant:vagrant $SRCROOT
|
||||
echo "Downloading go (${GOVERSION}) ..."
|
||||
wget -P /tmp --quiet "https://storage.googleapis.com/golang/go${GOVERSION}.linux-${ARCH}.tar.gz"
|
||||
echo "Setting up go (${GOVERSION}) ..."
|
||||
tar -C /opt -xf "/tmp/go${GOVERSION}.linux-${ARCH}.tar.gz"
|
||||
chmod 775 "$SRCROOT"
|
||||
chown vagrant:vagrant "$SRCROOT"
|
||||
|
||||
# Setup the GOPATH; even though the shared folder spec gives the working
|
||||
# directory the right user/group, we need to set it properly on the
|
||||
# parent path to allow subsequent "go get" commands to work.
|
||||
sudo mkdir -p $SRCPATH
|
||||
sudo chown -R vagrant:vagrant $SRCPATH 2>/dev/null || true
|
||||
mkdir -p "$SRCPATH"
|
||||
chown -R vagrant:vagrant "$SRCPATH" 2>/dev/null || true
|
||||
# ^^ silencing errors here because we expect this to fail for the shared folder
|
||||
|
||||
cat <<EOF >/tmp/gopath.sh
|
||||
install -m0755 /dev/stdin /etc/profile.d/gopath.sh <<EOF
|
||||
export GOPATH="$SRCPATH"
|
||||
export GOROOT="$SRCROOT"
|
||||
export PATH="$SRCROOT/bin:$SRCPATH/bin:\$PATH"
|
||||
EOF
|
||||
sudo mv /tmp/gopath.sh /etc/profile.d/gopath.sh
|
||||
sudo chmod 0755 /etc/profile.d/gopath.sh
|
||||
source /etc/profile.d/gopath.sh
|
||||
|
||||
cat >>/home/vagrant/.bashrc <<EOF
|
||||
|
||||
## After login, change to terraform directory
|
||||
cd /opt/gopath/src/github.com/hashicorp/terraform
|
||||
EOF
|
||||
|
||||
SCRIPT
|
||||
|
||||
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
||||
config.vm.box = "bento/ubuntu-12.04"
|
||||
config.vm.box = "bento/ubuntu-14.04"
|
||||
config.vm.hostname = "terraform"
|
||||
|
||||
config.vm.provision "shell", inline: $script, privileged: false
|
||||
config.vm.provision "prepare-shell", type: "shell", inline: "sudo sed -i '/tty/!s/mesg n/tty -s \\&\\& mesg n/' /root/.profile", privileged: false
|
||||
config.vm.provision "initial-setup", type: "shell", inline: $script
|
||||
config.vm.synced_folder '.', '/opt/gopath/src/github.com/hashicorp/terraform'
|
||||
|
||||
config.vm.provider "docker" do |v, override|
|
||||
override.vm.box = "tknerr/baseimage-ubuntu-14.04"
|
||||
end
|
||||
|
||||
["vmware_fusion", "vmware_workstation"].each do |p|
|
||||
config.vm.provider p do |v|
|
||||
v.vmx["memsize"] = "4096"
|
||||
|
@ -60,4 +75,9 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
|||
v.memory = 4096
|
||||
v.cpus = 2
|
||||
end
|
||||
|
||||
config.vm.provider "parallels" do |prl|
|
||||
prl.memory = 4096
|
||||
prl.cpus = 2
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/archive"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: archive.Provider,
|
||||
})
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/bitbucket"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: bitbucket.Provider,
|
||||
})
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/logentries"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: logentries.Provider,
|
||||
})
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
package main
|
|
@ -0,0 +1,12 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/pagerduty"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: pagerduty.Provider,
|
||||
})
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/scaleway"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: scaleway.Provider,
|
||||
})
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/builtin/providers/vault"
|
||||
"github.com/hashicorp/terraform/plugin"
|
||||
)
|
||||
|
||||
func main() {
|
||||
plugin.Serve(&plugin.ServeOpts{
|
||||
ProviderFunc: vault.Provider,
|
||||
})
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
*.zip
|
|
@ -0,0 +1,47 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
type Archiver interface {
|
||||
ArchiveContent(content []byte, infilename string) error
|
||||
ArchiveFile(infilename string) error
|
||||
ArchiveDir(indirname string) error
|
||||
}
|
||||
|
||||
type ArchiverBuilder func(filepath string) Archiver
|
||||
|
||||
var archiverBuilders = map[string]ArchiverBuilder{
|
||||
"zip": NewZipArchiver,
|
||||
}
|
||||
|
||||
func getArchiver(archiveType string, filepath string) Archiver {
|
||||
if builder, ok := archiverBuilders[archiveType]; ok {
|
||||
return builder(filepath)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func assertValidFile(infilename string) (os.FileInfo, error) {
|
||||
fi, err := os.Stat(infilename)
|
||||
if err != nil && os.IsNotExist(err) {
|
||||
return fi, fmt.Errorf("could not archive missing file: %s", infilename)
|
||||
}
|
||||
return fi, err
|
||||
}
|
||||
|
||||
func assertValidDir(indirname string) (os.FileInfo, error) {
|
||||
fi, err := os.Stat(indirname)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return fi, fmt.Errorf("could not archive missing directory: %s", indirname)
|
||||
}
|
||||
return fi, err
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return fi, fmt.Errorf("could not archive directory that is a file: %s", indirname)
|
||||
}
|
||||
return fi, nil
|
||||
}
|
|
@ -0,0 +1,154 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceFile() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceFileRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"type": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
"source_content": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
ConflictsWith: []string{"source_file", "source_dir"},
|
||||
},
|
||||
"source_content_filename": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
ConflictsWith: []string{"source_file", "source_dir"},
|
||||
},
|
||||
"source_file": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
ConflictsWith: []string{"source_content", "source_content_filename", "source_dir"},
|
||||
},
|
||||
"source_dir": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
ConflictsWith: []string{"source_content", "source_content_filename", "source_file"},
|
||||
},
|
||||
"output_path": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"output_size": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
"output_sha": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
Description: "SHA1 checksum of output file",
|
||||
},
|
||||
"output_base64sha256": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
ForceNew: true,
|
||||
Description: "Base64 Encoded SHA256 checksum of output file",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceFileRead(d *schema.ResourceData, meta interface{}) error {
|
||||
outputPath := d.Get("output_path").(string)
|
||||
|
||||
outputDirectory := path.Dir(outputPath)
|
||||
if outputDirectory != "" {
|
||||
if _, err := os.Stat(outputDirectory); err != nil {
|
||||
if err := os.MkdirAll(outputDirectory, 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := archive(d); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Generate archived file stats
|
||||
fi, err := os.Stat(outputPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sha1, base64sha256, err := genFileShas(outputPath)
|
||||
if err != nil {
|
||||
|
||||
return fmt.Errorf("could not generate file checksum sha256: %s", err)
|
||||
}
|
||||
d.Set("output_sha", sha1)
|
||||
d.Set("output_base64sha256", base64sha256)
|
||||
|
||||
d.Set("output_size", fi.Size())
|
||||
d.SetId(d.Get("output_sha").(string))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func archive(d *schema.ResourceData) error {
|
||||
archiveType := d.Get("type").(string)
|
||||
outputPath := d.Get("output_path").(string)
|
||||
|
||||
archiver := getArchiver(archiveType, outputPath)
|
||||
if archiver == nil {
|
||||
return fmt.Errorf("archive type not supported: %s", archiveType)
|
||||
}
|
||||
|
||||
if dir, ok := d.GetOk("source_dir"); ok {
|
||||
if err := archiver.ArchiveDir(dir.(string)); err != nil {
|
||||
return fmt.Errorf("error archiving directory: %s", err)
|
||||
}
|
||||
} else if file, ok := d.GetOk("source_file"); ok {
|
||||
if err := archiver.ArchiveFile(file.(string)); err != nil {
|
||||
return fmt.Errorf("error archiving file: %s", err)
|
||||
}
|
||||
} else if filename, ok := d.GetOk("source_content_filename"); ok {
|
||||
content := d.Get("source_content").(string)
|
||||
if err := archiver.ArchiveContent([]byte(content), filename.(string)); err != nil {
|
||||
return fmt.Errorf("error archiving content: %s", err)
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("one of 'source_dir', 'source_file', 'source_content_filename' must be specified")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func genFileShas(filename string) (string, string, error) {
|
||||
data, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return "", "", fmt.Errorf("could not compute file '%s' checksum: %s", filename, err)
|
||||
}
|
||||
h := sha1.New()
|
||||
h.Write([]byte(data))
|
||||
sha1 := hex.EncodeToString(h.Sum(nil))
|
||||
|
||||
h256 := sha256.New()
|
||||
h256.Write([]byte(data))
|
||||
shaSum := h256.Sum(nil)
|
||||
sha256base64 := base64.StdEncoding.EncodeToString(shaSum[:])
|
||||
|
||||
return sha1, sha256base64, nil
|
||||
}
|
|
@ -0,0 +1,93 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
r "github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccArchiveFile_Basic(t *testing.T) {
|
||||
var fileSize string
|
||||
r.Test(t, r.TestCase{
|
||||
Providers: testProviders,
|
||||
Steps: []r.TestStep{
|
||||
r.TestStep{
|
||||
Config: testAccArchiveFileContentConfig,
|
||||
Check: r.ComposeTestCheckFunc(
|
||||
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
|
||||
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
|
||||
),
|
||||
},
|
||||
r.TestStep{
|
||||
Config: testAccArchiveFileFileConfig,
|
||||
Check: r.ComposeTestCheckFunc(
|
||||
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
|
||||
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
|
||||
),
|
||||
},
|
||||
r.TestStep{
|
||||
Config: testAccArchiveFileDirConfig,
|
||||
Check: r.ComposeTestCheckFunc(
|
||||
testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
|
||||
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
|
||||
),
|
||||
},
|
||||
r.TestStep{
|
||||
Config: testAccArchiveFileOutputPath,
|
||||
Check: r.ComposeTestCheckFunc(
|
||||
testAccArchiveFileExists(fmt.Sprintf("%s/test.zip", tmpDir), &fileSize),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccArchiveFileExists(filename string, fileSize *string) r.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
*fileSize = ""
|
||||
fi, err := os.Stat(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*fileSize = fmt.Sprintf("%d", fi.Size())
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var testAccArchiveFileContentConfig = `
|
||||
data "archive_file" "foo" {
|
||||
type = "zip"
|
||||
source_content = "This is some content"
|
||||
source_content_filename = "content.txt"
|
||||
output_path = "zip_file_acc_test.zip"
|
||||
}
|
||||
`
|
||||
|
||||
var tmpDir = os.TempDir() + "/test"
|
||||
var testAccArchiveFileOutputPath = fmt.Sprintf(`
|
||||
data "archive_file" "foo" {
|
||||
type = "zip"
|
||||
source_content = "This is some content"
|
||||
source_content_filename = "content.txt"
|
||||
output_path = "%s/test.zip"
|
||||
}
|
||||
`, tmpDir)
|
||||
|
||||
var testAccArchiveFileFileConfig = `
|
||||
data "archive_file" "foo" {
|
||||
type = "zip"
|
||||
source_file = "test-fixtures/test-file.txt"
|
||||
output_path = "zip_file_acc_test.zip"
|
||||
}
|
||||
`
|
||||
|
||||
var testAccArchiveFileDirConfig = `
|
||||
data "archive_file" "foo" {
|
||||
type = "zip"
|
||||
source_dir = "test-fixtures/test-dir"
|
||||
output_path = "zip_file_acc_test.zip"
|
||||
}
|
||||
`
|
|
@ -0,0 +1,20 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func Provider() terraform.ResourceProvider {
|
||||
return &schema.Provider{
|
||||
DataSourcesMap: map[string]*schema.Resource{
|
||||
"archive_file": dataSourceFile(),
|
||||
},
|
||||
ResourcesMap: map[string]*schema.Resource{
|
||||
"archive_file": schema.DataSourceResourceShim(
|
||||
"archive_file",
|
||||
dataSourceFile(),
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
var testProviders = map[string]terraform.ResourceProvider{
|
||||
"archive": Provider(),
|
||||
}
|
||||
|
||||
func TestProvider(t *testing.T) {
|
||||
if err := Provider().(*schema.Provider).InternalValidate(); err != nil {
|
||||
t.Fatalf("err: %s", err)
|
||||
}
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
This is file 1
|
|
@ -0,0 +1 @@
|
|||
This is file 2
|
|
@ -0,0 +1 @@
|
|||
This is file 3
|
|
@ -0,0 +1 @@
|
|||
This is test content
|
|
@ -0,0 +1,107 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type ZipArchiver struct {
|
||||
filepath string
|
||||
filewriter *os.File
|
||||
writer *zip.Writer
|
||||
}
|
||||
|
||||
func NewZipArchiver(filepath string) Archiver {
|
||||
return &ZipArchiver{
|
||||
filepath: filepath,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *ZipArchiver) ArchiveContent(content []byte, infilename string) error {
|
||||
if err := a.open(); err != nil {
|
||||
return err
|
||||
}
|
||||
defer a.close()
|
||||
|
||||
f, err := a.writer.Create(infilename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = f.Write(content)
|
||||
return err
|
||||
}
|
||||
|
||||
func (a *ZipArchiver) ArchiveFile(infilename string) error {
|
||||
fi, err := assertValidFile(infilename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
content, err := ioutil.ReadFile(infilename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return a.ArchiveContent(content, fi.Name())
|
||||
}
|
||||
|
||||
func (a *ZipArchiver) ArchiveDir(indirname string) error {
|
||||
_, err := assertValidDir(indirname)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := a.open(); err != nil {
|
||||
return err
|
||||
}
|
||||
defer a.close()
|
||||
|
||||
return filepath.Walk(indirname, func(path string, info os.FileInfo, err error) error {
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
relname, err := filepath.Rel(indirname, path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error relativizing file for archival: %s", err)
|
||||
}
|
||||
f, err := a.writer.Create(relname)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error creating file inside archive: %s", err)
|
||||
}
|
||||
content, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error reading file for archival: %s", err)
|
||||
}
|
||||
_, err = f.Write(content)
|
||||
return err
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
func (a *ZipArchiver) open() error {
|
||||
f, err := os.Create(a.filepath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
a.filewriter = f
|
||||
a.writer = zip.NewWriter(f)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *ZipArchiver) close() {
|
||||
if a.writer != nil {
|
||||
a.writer.Close()
|
||||
a.writer = nil
|
||||
}
|
||||
if a.filewriter != nil {
|
||||
a.filewriter.Close()
|
||||
a.filewriter = nil
|
||||
}
|
||||
}
|
|
@ -0,0 +1,84 @@
|
|||
package archive
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestZipArchiver_Content(t *testing.T) {
|
||||
zipfilepath := "archive-content.zip"
|
||||
archiver := NewZipArchiver(zipfilepath)
|
||||
if err := archiver.ArchiveContent([]byte("This is some content"), "content.txt"); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
ensureContents(t, zipfilepath, map[string][]byte{
|
||||
"content.txt": []byte("This is some content"),
|
||||
})
|
||||
}
|
||||
|
||||
func TestZipArchiver_File(t *testing.T) {
|
||||
zipfilepath := "archive-file.zip"
|
||||
archiver := NewZipArchiver(zipfilepath)
|
||||
if err := archiver.ArchiveFile("./test-fixtures/test-file.txt"); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
ensureContents(t, zipfilepath, map[string][]byte{
|
||||
"test-file.txt": []byte("This is test content"),
|
||||
})
|
||||
}
|
||||
|
||||
func TestZipArchiver_Dir(t *testing.T) {
|
||||
zipfilepath := "archive-dir.zip"
|
||||
archiver := NewZipArchiver(zipfilepath)
|
||||
if err := archiver.ArchiveDir("./test-fixtures/test-dir"); err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
ensureContents(t, zipfilepath, map[string][]byte{
|
||||
"file1.txt": []byte("This is file 1"),
|
||||
"file2.txt": []byte("This is file 2"),
|
||||
"file3.txt": []byte("This is file 3"),
|
||||
})
|
||||
}
|
||||
|
||||
func ensureContents(t *testing.T, zipfilepath string, wants map[string][]byte) {
|
||||
r, err := zip.OpenReader(zipfilepath)
|
||||
if err != nil {
|
||||
t.Fatalf("could not open zip file: %s", err)
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
if len(r.File) != len(wants) {
|
||||
t.Errorf("mismatched file count, got %d, want %d", len(r.File), len(wants))
|
||||
}
|
||||
for _, cf := range r.File {
|
||||
ensureContent(t, wants, cf)
|
||||
}
|
||||
}
|
||||
|
||||
func ensureContent(t *testing.T, wants map[string][]byte, got *zip.File) {
|
||||
want, ok := wants[got.Name]
|
||||
if !ok {
|
||||
t.Errorf("additional file in zip: %s", got.Name)
|
||||
return
|
||||
}
|
||||
|
||||
r, err := got.Open()
|
||||
if err != nil {
|
||||
t.Errorf("could not open file: %s", err)
|
||||
}
|
||||
defer r.Close()
|
||||
gotContentBytes, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Errorf("could not read file: %s", err)
|
||||
}
|
||||
|
||||
wantContent := string(want)
|
||||
gotContent := string(gotContentBytes)
|
||||
if gotContent != wantContent {
|
||||
t.Errorf("mismatched content\ngot\n%s\nwant\n%s", gotContent, wantContent)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,149 @@
|
|||
package atlas
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/atlas-go/v1"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAtlasArtifact() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceArtifactRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
|
||||
"type": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
|
||||
"build": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
|
||||
"version": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
|
||||
"metadata_keys": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Set: schema.HashString,
|
||||
},
|
||||
|
||||
"metadata": &schema.Schema{
|
||||
Type: schema.TypeMap,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
|
||||
"file_url": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"metadata_full": &schema.Schema{
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"slug": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"version_real": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceArtifactRead(d *schema.ResourceData, meta interface{}) error {
|
||||
client := meta.(*atlas.Client)
|
||||
|
||||
// Parse the slug from the name given of the artifact since the API
|
||||
// expects these to be split.
|
||||
user, name, err := atlas.ParseSlug(d.Get("name").(string))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Filter by version or build if given
|
||||
var build, version string
|
||||
if v, ok := d.GetOk("version"); ok {
|
||||
version = v.(string)
|
||||
} else if b, ok := d.GetOk("build"); ok {
|
||||
build = b.(string)
|
||||
}
|
||||
|
||||
// If we have neither, default to latest version
|
||||
if build == "" && version == "" {
|
||||
version = "latest"
|
||||
}
|
||||
|
||||
// Compile the metadata search params
|
||||
md := make(map[string]string)
|
||||
for _, v := range d.Get("metadata_keys").(*schema.Set).List() {
|
||||
md[v.(string)] = atlas.MetadataAnyValue
|
||||
}
|
||||
for k, v := range d.Get("metadata").(map[string]interface{}) {
|
||||
md[k] = v.(string)
|
||||
}
|
||||
|
||||
// Do the search!
|
||||
vs, err := client.ArtifactSearch(&atlas.ArtifactSearchOpts{
|
||||
User: user,
|
||||
Name: name,
|
||||
Type: d.Get("type").(string),
|
||||
Build: build,
|
||||
Version: version,
|
||||
Metadata: md,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf(
|
||||
"Error searching for artifact '%s/%s': %s",
|
||||
user, name, err)
|
||||
}
|
||||
|
||||
if len(vs) == 0 {
|
||||
return fmt.Errorf("No matching artifact for '%s/%s'", user, name)
|
||||
} else if len(vs) > 1 {
|
||||
return fmt.Errorf(
|
||||
"Got %d results for '%s/%s', only one is allowed",
|
||||
len(vs), user, name)
|
||||
}
|
||||
v := vs[0]
|
||||
|
||||
d.SetId(v.ID)
|
||||
if v.ID == "" {
|
||||
d.SetId(fmt.Sprintf("%s %d", v.Tag, v.Version))
|
||||
}
|
||||
d.Set("version_real", v.Version)
|
||||
d.Set("metadata_full", cleanMetadata(v.Metadata))
|
||||
d.Set("slug", v.Slug)
|
||||
|
||||
d.Set("file_url", "")
|
||||
if u, err := client.ArtifactFileURL(v); err != nil {
|
||||
return fmt.Errorf(
|
||||
"Error reading file URL: %s", err)
|
||||
} else if u != nil {
|
||||
d.Set("file_url", u.String())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,150 @@
|
|||
package atlas
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceArtifact_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataArtifact_basic,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckDataArtifactState("name", "hashicorp/tf-provider-test"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccDataSourceArtifact_metadata(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataArtifact_metadata,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckDataArtifactState("name", "hashicorp/tf-provider-test"),
|
||||
testAccCheckDataArtifactState("id", "x86"),
|
||||
testAccCheckDataArtifactState("metadata_full.arch", "x86"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccDataSourceArtifact_metadataSet(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataArtifact_metadataSet,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckDataArtifactState("name", "hashicorp/tf-provider-test"),
|
||||
testAccCheckDataArtifactState("id", "x64"),
|
||||
testAccCheckDataArtifactState("metadata_full.arch", "x64"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccDataSourceArtifact_buildLatest(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataArtifact_buildLatest,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckDataArtifactState("name", "hashicorp/tf-provider-test"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccDataSourceArtifact_versionAny(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataArtifact_versionAny,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckDataArtifactState("name", "hashicorp/tf-provider-test"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccCheckDataArtifactState(key, value string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources["data.atlas_artifact.foobar"]
|
||||
if !ok {
|
||||
return fmt.Errorf("Not found: %s", "data.atlas_artifact.foobar")
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("No ID is set")
|
||||
}
|
||||
|
||||
p := rs.Primary
|
||||
if p.Attributes[key] != value {
|
||||
return fmt.Errorf(
|
||||
"%s != %s (actual: %s)", key, value, p.Attributes[key])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
const testAccDataArtifact_basic = `
|
||||
data "atlas_artifact" "foobar" {
|
||||
name = "hashicorp/tf-provider-test"
|
||||
type = "foo"
|
||||
}`
|
||||
|
||||
const testAccDataArtifact_metadata = `
|
||||
data "atlas_artifact" "foobar" {
|
||||
name = "hashicorp/tf-provider-test"
|
||||
type = "foo"
|
||||
metadata {
|
||||
arch = "x86"
|
||||
}
|
||||
version = "any"
|
||||
}`
|
||||
|
||||
const testAccDataArtifact_metadataSet = `
|
||||
data "atlas_artifact" "foobar" {
|
||||
name = "hashicorp/tf-provider-test"
|
||||
type = "foo"
|
||||
metadata_keys = ["arch"]
|
||||
version = "any"
|
||||
}`
|
||||
|
||||
const testAccDataArtifact_buildLatest = `
|
||||
data "atlas_artifact" "foobar" {
|
||||
name = "hashicorp/tf-provider-test"
|
||||
type = "foo"
|
||||
build = "latest"
|
||||
metadata {
|
||||
arch = "x86"
|
||||
}
|
||||
}`
|
||||
|
||||
const testAccDataArtifact_versionAny = `
|
||||
data "atlas_artifact" "foobar" {
|
||||
name = "hashicorp/tf-provider-test"
|
||||
type = "foo"
|
||||
version = "any"
|
||||
}`
|
|
@ -31,6 +31,10 @@ func Provider() terraform.ResourceProvider {
|
|||
},
|
||||
},
|
||||
|
||||
DataSourcesMap: map[string]*schema.Resource{
|
||||
"atlas_artifact": dataSourceAtlasArtifact(),
|
||||
},
|
||||
|
||||
ResourcesMap: map[string]*schema.Resource{
|
||||
"atlas_artifact": resourceArtifact(),
|
||||
},
|
||||
|
@ -48,6 +52,7 @@ func providerConfigure(d *schema.ResourceData) (interface{}, error) {
|
|||
return nil, err
|
||||
}
|
||||
}
|
||||
client.DefaultHeader.Set(terraform.VersionHeader, terraform.VersionString())
|
||||
client.Token = d.Get("token").(string)
|
||||
|
||||
return client, nil
|
||||
|
|
|
@ -22,9 +22,10 @@ func resourceArtifact() *schema.Resource {
|
|||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
Deprecated: `atlas_artifact is now deprecated. Use the Atlas Artifact Data Source instead. See https://terraform.io/docs/providers/atlas/d/artifact.html`,
|
||||
},
|
||||
|
||||
"type": &schema.Schema{
|
||||
|
|
|
@ -2,7 +2,6 @@ package atlas
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
|
@ -109,21 +108,6 @@ func testAccCheckArtifactState(key, value string) resource.TestCheckFunc {
|
|||
}
|
||||
}
|
||||
|
||||
func TestCleanMetadata(t *testing.T) {
|
||||
in := map[string]string{
|
||||
"region.us-east-1": "in",
|
||||
"what is this?": "out",
|
||||
}
|
||||
exp := map[string]string{
|
||||
"region-us-east-1": "in",
|
||||
"what-is-this-": "out",
|
||||
}
|
||||
out := cleanMetadata(in)
|
||||
if !reflect.DeepEqual(out, exp) {
|
||||
t.Fatalf("bad: %#v", out)
|
||||
}
|
||||
}
|
||||
|
||||
const testAccArtifact_basic = `
|
||||
resource "atlas_artifact" "foobar" {
|
||||
name = "hashicorp/tf-provider-test"
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
@ -11,43 +12,50 @@ import (
|
|||
"github.com/aws/aws-sdk-go/aws/awserr"
|
||||
awsCredentials "github.com/aws/aws-sdk-go/aws/credentials"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials/stscreds"
|
||||
"github.com/aws/aws-sdk-go/aws/ec2metadata"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/iam"
|
||||
"github.com/aws/aws-sdk-go/service/sts"
|
||||
"github.com/hashicorp/errwrap"
|
||||
"github.com/hashicorp/go-cleanhttp"
|
||||
)
|
||||
|
||||
func GetAccountId(iamconn *iam.IAM, stsconn *sts.STS, authProviderName string) (string, error) {
|
||||
func GetAccountInfo(iamconn *iam.IAM, stsconn *sts.STS, authProviderName string) (string, string, error) {
|
||||
// If we have creds from instance profile, we can use metadata API
|
||||
if authProviderName == ec2rolecreds.ProviderName {
|
||||
log.Println("[DEBUG] Trying to get account ID via AWS Metadata API")
|
||||
|
||||
cfg := &aws.Config{}
|
||||
setOptionalEndpoint(cfg)
|
||||
metadataClient := ec2metadata.New(session.New(cfg))
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
return "", "", errwrap.Wrapf("Error creating AWS session: %s", err)
|
||||
}
|
||||
|
||||
metadataClient := ec2metadata.New(sess)
|
||||
info, err := metadataClient.IAMInfo()
|
||||
if err != nil {
|
||||
// This can be triggered when no IAM Role is assigned
|
||||
// or AWS just happens to return invalid response
|
||||
return "", fmt.Errorf("Failed getting EC2 IAM info: %s", err)
|
||||
return "", "", fmt.Errorf("Failed getting EC2 IAM info: %s", err)
|
||||
}
|
||||
|
||||
return parseAccountIdFromArn(info.InstanceProfileArn)
|
||||
return parseAccountInfoFromArn(info.InstanceProfileArn)
|
||||
}
|
||||
|
||||
// Then try IAM GetUser
|
||||
log.Println("[DEBUG] Trying to get account ID via iam:GetUser")
|
||||
outUser, err := iamconn.GetUser(nil)
|
||||
if err == nil {
|
||||
return parseAccountIdFromArn(*outUser.User.Arn)
|
||||
return parseAccountInfoFromArn(*outUser.User.Arn)
|
||||
}
|
||||
|
||||
awsErr, ok := err.(awserr.Error)
|
||||
// AccessDenied and ValidationError can be raised
|
||||
// if credentials belong to federated profile, so we ignore these
|
||||
if !ok || (awsErr.Code() != "AccessDenied" && awsErr.Code() != "ValidationError") {
|
||||
return "", fmt.Errorf("Failed getting account ID via 'iam:GetUser': %s", err)
|
||||
return "", "", fmt.Errorf("Failed getting account ID via 'iam:GetUser': %s", err)
|
||||
}
|
||||
log.Printf("[DEBUG] Getting account ID via iam:GetUser failed: %s", err)
|
||||
|
||||
|
@ -55,7 +63,7 @@ func GetAccountId(iamconn *iam.IAM, stsconn *sts.STS, authProviderName string) (
|
|||
log.Println("[DEBUG] Trying to get account ID via sts:GetCallerIdentity")
|
||||
outCallerIdentity, err := stsconn.GetCallerIdentity(&sts.GetCallerIdentityInput{})
|
||||
if err == nil {
|
||||
return *outCallerIdentity.Account, nil
|
||||
return parseAccountInfoFromArn(*outCallerIdentity.Arn)
|
||||
}
|
||||
log.Printf("[DEBUG] Getting account ID via sts:GetCallerIdentity failed: %s", err)
|
||||
|
||||
|
@ -65,39 +73,39 @@ func GetAccountId(iamconn *iam.IAM, stsconn *sts.STS, authProviderName string) (
|
|||
MaxItems: aws.Int64(int64(1)),
|
||||
})
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Failed getting account ID via 'iam:ListRoles': %s", err)
|
||||
return "", "", fmt.Errorf("Failed getting account ID via 'iam:ListRoles': %s", err)
|
||||
}
|
||||
|
||||
if len(outRoles.Roles) < 1 {
|
||||
return "", fmt.Errorf("Failed getting account ID via 'iam:ListRoles': No roles available")
|
||||
return "", "", errors.New("Failed getting account ID via 'iam:ListRoles': No roles available")
|
||||
}
|
||||
|
||||
return parseAccountIdFromArn(*outRoles.Roles[0].Arn)
|
||||
return parseAccountInfoFromArn(*outRoles.Roles[0].Arn)
|
||||
}
|
||||
|
||||
func parseAccountIdFromArn(arn string) (string, error) {
|
||||
func parseAccountInfoFromArn(arn string) (string, string, error) {
|
||||
parts := strings.Split(arn, ":")
|
||||
if len(parts) < 5 {
|
||||
return "", fmt.Errorf("Unable to parse ID from invalid ARN: %q", arn)
|
||||
return "", "", fmt.Errorf("Unable to parse ID from invalid ARN: %q", arn)
|
||||
}
|
||||
return parts[4], nil
|
||||
return parts[1], parts[4], nil
|
||||
}
|
||||
|
||||
// This function is responsible for reading credentials from the
|
||||
// environment in the case that they're not explicitly specified
|
||||
// in the Terraform configuration.
|
||||
func GetCredentials(key, secret, token, profile, credsfile string) *awsCredentials.Credentials {
|
||||
func GetCredentials(c *Config) (*awsCredentials.Credentials, error) {
|
||||
// build a chain provider, lazy-evaulated by aws-sdk
|
||||
providers := []awsCredentials.Provider{
|
||||
&awsCredentials.StaticProvider{Value: awsCredentials.Value{
|
||||
AccessKeyID: key,
|
||||
SecretAccessKey: secret,
|
||||
SessionToken: token,
|
||||
AccessKeyID: c.AccessKey,
|
||||
SecretAccessKey: c.SecretKey,
|
||||
SessionToken: c.Token,
|
||||
}},
|
||||
&awsCredentials.EnvProvider{},
|
||||
&awsCredentials.SharedCredentialsProvider{
|
||||
Filename: credsfile,
|
||||
Profile: profile,
|
||||
Filename: c.CredsFilename,
|
||||
Profile: c.Profile,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -111,25 +119,88 @@ func GetCredentials(key, secret, token, profile, credsfile string) *awsCredentia
|
|||
}
|
||||
usedEndpoint := setOptionalEndpoint(cfg)
|
||||
|
||||
// Real AWS should reply to a simple metadata request.
|
||||
// We check it actually does to ensure something else didn't just
|
||||
// happen to be listening on the same IP:Port
|
||||
metadataClient := ec2metadata.New(session.New(cfg))
|
||||
if metadataClient.Available() {
|
||||
providers = append(providers, &ec2rolecreds.EC2RoleProvider{
|
||||
Client: metadataClient,
|
||||
})
|
||||
log.Printf("[INFO] AWS EC2 instance detected via default metadata" +
|
||||
" API endpoint, EC2RoleProvider added to the auth chain")
|
||||
} else {
|
||||
if usedEndpoint == "" {
|
||||
usedEndpoint = "default location"
|
||||
if !c.SkipMetadataApiCheck {
|
||||
// Real AWS should reply to a simple metadata request.
|
||||
// We check it actually does to ensure something else didn't just
|
||||
// happen to be listening on the same IP:Port
|
||||
metadataClient := ec2metadata.New(session.New(cfg))
|
||||
if metadataClient.Available() {
|
||||
providers = append(providers, &ec2rolecreds.EC2RoleProvider{
|
||||
Client: metadataClient,
|
||||
})
|
||||
log.Print("[INFO] AWS EC2 instance detected via default metadata" +
|
||||
" API endpoint, EC2RoleProvider added to the auth chain")
|
||||
} else {
|
||||
if usedEndpoint == "" {
|
||||
usedEndpoint = "default location"
|
||||
}
|
||||
log.Printf("[WARN] Ignoring AWS metadata API endpoint at %s "+
|
||||
"as it doesn't return any instance-id", usedEndpoint)
|
||||
}
|
||||
log.Printf("[WARN] Ignoring AWS metadata API endpoint at %s "+
|
||||
"as it doesn't return any instance-id", usedEndpoint)
|
||||
}
|
||||
|
||||
return awsCredentials.NewChainCredentials(providers)
|
||||
// This is the "normal" flow (i.e. not assuming a role)
|
||||
if c.AssumeRoleARN == "" {
|
||||
return awsCredentials.NewChainCredentials(providers), nil
|
||||
}
|
||||
|
||||
// Otherwise we need to construct and STS client with the main credentials, and verify
|
||||
// that we can assume the defined role.
|
||||
log.Printf("[INFO] Attempting to AssumeRole %s (SessionName: %q, ExternalId: %q)",
|
||||
c.AssumeRoleARN, c.AssumeRoleSessionName, c.AssumeRoleExternalID)
|
||||
|
||||
creds := awsCredentials.NewChainCredentials(providers)
|
||||
cp, err := creds.Get()
|
||||
if err != nil {
|
||||
if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == "NoCredentialProviders" {
|
||||
return nil, errors.New(`No valid credential sources found for AWS Provider.
|
||||
Please see https://terraform.io/docs/providers/aws/index.html for more information on
|
||||
providing credentials for the AWS Provider`)
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("Error loading credentials for AWS Provider: %s", err)
|
||||
}
|
||||
|
||||
log.Printf("[INFO] AWS Auth provider used: %q", cp.ProviderName)
|
||||
|
||||
awsConfig := &aws.Config{
|
||||
Credentials: creds,
|
||||
Region: aws.String(c.Region),
|
||||
MaxRetries: aws.Int(c.MaxRetries),
|
||||
HTTPClient: cleanhttp.DefaultClient(),
|
||||
S3ForcePathStyle: aws.Bool(c.S3ForcePathStyle),
|
||||
}
|
||||
|
||||
stsclient := sts.New(session.New(awsConfig))
|
||||
assumeRoleProvider := &stscreds.AssumeRoleProvider{
|
||||
Client: stsclient,
|
||||
RoleARN: c.AssumeRoleARN,
|
||||
}
|
||||
if c.AssumeRoleSessionName != "" {
|
||||
assumeRoleProvider.RoleSessionName = c.AssumeRoleSessionName
|
||||
}
|
||||
if c.AssumeRoleExternalID != "" {
|
||||
assumeRoleProvider.ExternalID = aws.String(c.AssumeRoleExternalID)
|
||||
}
|
||||
|
||||
providers = []awsCredentials.Provider{assumeRoleProvider}
|
||||
|
||||
assumeRoleCreds := awsCredentials.NewChainCredentials(providers)
|
||||
_, err = assumeRoleCreds.Get()
|
||||
if err != nil {
|
||||
if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == "NoCredentialProviders" {
|
||||
return nil, fmt.Errorf("The role %q cannot be assumed.\n\n"+
|
||||
" There are a number of possible causes of this - the most common are:\n"+
|
||||
" * The credentials used in order to assume the role are invalid\n"+
|
||||
" * The credentials do not have appropriate permission to assume the role\n"+
|
||||
" * The role ARN is not valid",
|
||||
c.AssumeRoleARN)
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("Error loading credentials for AWS Provider: %s", err)
|
||||
}
|
||||
|
||||
return assumeRoleCreds, nil
|
||||
}
|
||||
|
||||
func setOptionalEndpoint(cfg *aws.Config) string {
|
||||
|
|
|
@ -21,7 +21,7 @@ import (
|
|||
"github.com/aws/aws-sdk-go/service/sts"
|
||||
)
|
||||
|
||||
func TestAWSGetAccountId_shouldBeValid_fromEC2Role(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldBeValid_fromEC2Role(t *testing.T) {
|
||||
resetEnv := unsetEnv(t)
|
||||
defer resetEnv()
|
||||
// capture the test server's close method, to call after the test returns
|
||||
|
@ -32,18 +32,23 @@ func TestAWSGetAccountId_shouldBeValid_fromEC2Role(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, ec2rolecreds.ProviderName)
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, ec2rolecreds.ProviderName)
|
||||
if err != nil {
|
||||
t.Fatalf("Getting account ID from EC2 metadata API failed: %s", err)
|
||||
}
|
||||
|
||||
expectedPart := "aws"
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Expected partition: %s, given: %s", expectedPart, part)
|
||||
}
|
||||
|
||||
expectedAccountId := "123456789013"
|
||||
if id != expectedAccountId {
|
||||
t.Fatalf("Expected account ID: %s, given: %s", expectedAccountId, id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSGetAccountId_shouldBeValid_EC2RoleHasPriority(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldBeValid_EC2RoleHasPriority(t *testing.T) {
|
||||
resetEnv := unsetEnv(t)
|
||||
defer resetEnv()
|
||||
// capture the test server's close method, to call after the test returns
|
||||
|
@ -51,7 +56,7 @@ func TestAWSGetAccountId_shouldBeValid_EC2RoleHasPriority(t *testing.T) {
|
|||
defer awsTs()
|
||||
|
||||
iamEndpoints := []*iamEndpoint{
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetUser&Version=2010-05-08"},
|
||||
Response: &iamResponse{200, iamResponse_GetUser_valid, "text/xml"},
|
||||
},
|
||||
|
@ -59,20 +64,25 @@ func TestAWSGetAccountId_shouldBeValid_EC2RoleHasPriority(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, ec2rolecreds.ProviderName)
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, ec2rolecreds.ProviderName)
|
||||
if err != nil {
|
||||
t.Fatalf("Getting account ID from EC2 metadata API failed: %s", err)
|
||||
}
|
||||
|
||||
expectedPart := "aws"
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Expected partition: %s, given: %s", expectedPart, part)
|
||||
}
|
||||
|
||||
expectedAccountId := "123456789013"
|
||||
if id != expectedAccountId {
|
||||
t.Fatalf("Expected account ID: %s, given: %s", expectedAccountId, id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSGetAccountId_shouldBeValid_fromIamUser(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldBeValid_fromIamUser(t *testing.T) {
|
||||
iamEndpoints := []*iamEndpoint{
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetUser&Version=2010-05-08"},
|
||||
Response: &iamResponse{200, iamResponse_GetUser_valid, "text/xml"},
|
||||
},
|
||||
|
@ -81,24 +91,29 @@ func TestAWSGetAccountId_shouldBeValid_fromIamUser(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, "")
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, "")
|
||||
if err != nil {
|
||||
t.Fatalf("Getting account ID via GetUser failed: %s", err)
|
||||
}
|
||||
|
||||
expectedPart := "aws"
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Expected partition: %s, given: %s", expectedPart, part)
|
||||
}
|
||||
|
||||
expectedAccountId := "123456789012"
|
||||
if id != expectedAccountId {
|
||||
t.Fatalf("Expected account ID: %s, given: %s", expectedAccountId, id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSGetAccountId_shouldBeValid_fromGetCallerIdentity(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldBeValid_fromGetCallerIdentity(t *testing.T) {
|
||||
iamEndpoints := []*iamEndpoint{
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetUser&Version=2010-05-08"},
|
||||
Response: &iamResponse{403, iamResponse_GetUser_unauthorized, "text/xml"},
|
||||
},
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetCallerIdentity&Version=2011-06-15"},
|
||||
Response: &iamResponse{200, stsResponse_GetCallerIdentity_valid, "text/xml"},
|
||||
},
|
||||
|
@ -106,28 +121,33 @@ func TestAWSGetAccountId_shouldBeValid_fromGetCallerIdentity(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, "")
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, "")
|
||||
if err != nil {
|
||||
t.Fatalf("Getting account ID via GetUser failed: %s", err)
|
||||
}
|
||||
|
||||
expectedPart := "aws"
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Expected partition: %s, given: %s", expectedPart, part)
|
||||
}
|
||||
|
||||
expectedAccountId := "123456789012"
|
||||
if id != expectedAccountId {
|
||||
t.Fatalf("Expected account ID: %s, given: %s", expectedAccountId, id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSGetAccountId_shouldBeValid_fromIamListRoles(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldBeValid_fromIamListRoles(t *testing.T) {
|
||||
iamEndpoints := []*iamEndpoint{
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetUser&Version=2010-05-08"},
|
||||
Response: &iamResponse{403, iamResponse_GetUser_unauthorized, "text/xml"},
|
||||
},
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetCallerIdentity&Version=2011-06-15"},
|
||||
Response: &iamResponse{403, stsResponse_GetCallerIdentity_unauthorized, "text/xml"},
|
||||
},
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=ListRoles&MaxItems=1&Version=2010-05-08"},
|
||||
Response: &iamResponse{200, iamResponse_ListRoles_valid, "text/xml"},
|
||||
},
|
||||
|
@ -135,24 +155,29 @@ func TestAWSGetAccountId_shouldBeValid_fromIamListRoles(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, "")
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, "")
|
||||
if err != nil {
|
||||
t.Fatalf("Getting account ID via ListRoles failed: %s", err)
|
||||
}
|
||||
|
||||
expectedPart := "aws"
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Expected partition: %s, given: %s", expectedPart, part)
|
||||
}
|
||||
|
||||
expectedAccountId := "123456789012"
|
||||
if id != expectedAccountId {
|
||||
t.Fatalf("Expected account ID: %s, given: %s", expectedAccountId, id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSGetAccountId_shouldBeValid_federatedRole(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldBeValid_federatedRole(t *testing.T) {
|
||||
iamEndpoints := []*iamEndpoint{
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetUser&Version=2010-05-08"},
|
||||
Response: &iamResponse{400, iamResponse_GetUser_federatedFailure, "text/xml"},
|
||||
},
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=ListRoles&MaxItems=1&Version=2010-05-08"},
|
||||
Response: &iamResponse{200, iamResponse_ListRoles_valid, "text/xml"},
|
||||
},
|
||||
|
@ -160,24 +185,29 @@ func TestAWSGetAccountId_shouldBeValid_federatedRole(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, "")
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, "")
|
||||
if err != nil {
|
||||
t.Fatalf("Getting account ID via ListRoles failed: %s", err)
|
||||
}
|
||||
|
||||
expectedPart := "aws"
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Expected partition: %s, given: %s", expectedPart, part)
|
||||
}
|
||||
|
||||
expectedAccountId := "123456789012"
|
||||
if id != expectedAccountId {
|
||||
t.Fatalf("Expected account ID: %s, given: %s", expectedAccountId, id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSGetAccountId_shouldError_unauthorizedFromIam(t *testing.T) {
|
||||
func TestAWSGetAccountInfo_shouldError_unauthorizedFromIam(t *testing.T) {
|
||||
iamEndpoints := []*iamEndpoint{
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=GetUser&Version=2010-05-08"},
|
||||
Response: &iamResponse{403, iamResponse_GetUser_unauthorized, "text/xml"},
|
||||
},
|
||||
&iamEndpoint{
|
||||
{
|
||||
Request: &iamRequest{"POST", "/", "Action=ListRoles&MaxItems=1&Version=2010-05-08"},
|
||||
Response: &iamResponse{403, iamResponse_ListRoles_unauthorized, "text/xml"},
|
||||
},
|
||||
|
@ -185,29 +215,37 @@ func TestAWSGetAccountId_shouldError_unauthorizedFromIam(t *testing.T) {
|
|||
ts, iamConn, stsConn := getMockedAwsIamStsApi(iamEndpoints)
|
||||
defer ts()
|
||||
|
||||
id, err := GetAccountId(iamConn, stsConn, "")
|
||||
part, id, err := GetAccountInfo(iamConn, stsConn, "")
|
||||
if err == nil {
|
||||
t.Fatal("Expected error when getting account ID")
|
||||
}
|
||||
|
||||
if part != "" {
|
||||
t.Fatalf("Expected no partition, given: %s", part)
|
||||
}
|
||||
|
||||
if id != "" {
|
||||
t.Fatalf("Expected no account ID, given: %s", id)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAWSParseAccountIdFromArn(t *testing.T) {
|
||||
func TestAWSParseAccountInfoFromArn(t *testing.T) {
|
||||
validArn := "arn:aws:iam::101636750127:instance-profile/aws-elasticbeanstalk-ec2-role"
|
||||
expectedPart := "aws"
|
||||
expectedId := "101636750127"
|
||||
id, err := parseAccountIdFromArn(validArn)
|
||||
part, id, err := parseAccountInfoFromArn(validArn)
|
||||
if err != nil {
|
||||
t.Fatalf("Expected no error when parsing valid ARN: %s", err)
|
||||
}
|
||||
if part != expectedPart {
|
||||
t.Fatalf("Parsed part doesn't match with expected (%q != %q)", part, expectedPart)
|
||||
}
|
||||
if id != expectedId {
|
||||
t.Fatalf("Parsed id doesn't match with expected (%q != %q)", id, expectedId)
|
||||
}
|
||||
|
||||
invalidArn := "blablah"
|
||||
id, err = parseAccountIdFromArn(invalidArn)
|
||||
part, id, err = parseAccountInfoFromArn(invalidArn)
|
||||
if err == nil {
|
||||
t.Fatalf("Expected error when parsing invalid ARN (%q)", invalidArn)
|
||||
}
|
||||
|
@ -218,15 +256,20 @@ func TestAWSGetCredentials_shouldError(t *testing.T) {
|
|||
defer resetEnv()
|
||||
cfg := Config{}
|
||||
|
||||
c := GetCredentials(cfg.AccessKey, cfg.SecretKey, cfg.Token, cfg.Profile, cfg.CredsFilename)
|
||||
_, err := c.Get()
|
||||
c, err := GetCredentials(&cfg)
|
||||
if awsErr, ok := err.(awserr.Error); ok {
|
||||
if awsErr.Code() != "NoCredentialProviders" {
|
||||
t.Fatalf("Expected NoCredentialProviders error")
|
||||
t.Fatal("Expected NoCredentialProviders error")
|
||||
}
|
||||
}
|
||||
_, err = c.Get()
|
||||
if awsErr, ok := err.(awserr.Error); ok {
|
||||
if awsErr.Code() != "NoCredentialProviders" {
|
||||
t.Fatal("Expected NoCredentialProviders error")
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
t.Fatalf("Expected an error with empty env, keys, and IAM in AWS Config")
|
||||
t.Fatal("Expected an error with empty env, keys, and IAM in AWS Config")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -251,14 +294,19 @@ func TestAWSGetCredentials_shouldBeStatic(t *testing.T) {
|
|||
Token: c.Token,
|
||||
}
|
||||
|
||||
creds := GetCredentials(cfg.AccessKey, cfg.SecretKey, cfg.Token, cfg.Profile, cfg.CredsFilename)
|
||||
if creds == nil {
|
||||
t.Fatalf("Expected a static creds provider to be returned")
|
||||
creds, err := GetCredentials(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatal("Expected a static creds provider to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
|
||||
if v.AccessKeyID != c.Key {
|
||||
t.Fatalf("AccessKeyID mismatch, expected: (%s), got (%s)", c.Key, v.AccessKeyID)
|
||||
}
|
||||
|
@ -286,9 +334,12 @@ func TestAWSGetCredentials_shouldIAM(t *testing.T) {
|
|||
// An empty config, no key supplied
|
||||
cfg := Config{}
|
||||
|
||||
creds := GetCredentials(cfg.AccessKey, cfg.SecretKey, cfg.Token, cfg.Profile, cfg.CredsFilename)
|
||||
creds, err := GetCredentials(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatalf("Expected a static creds provider to be returned")
|
||||
t.Fatal("Expected a static creds provider to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
|
@ -335,10 +386,14 @@ func TestAWSGetCredentials_shouldIgnoreIAM(t *testing.T) {
|
|||
Token: c.Token,
|
||||
}
|
||||
|
||||
creds := GetCredentials(cfg.AccessKey, cfg.SecretKey, cfg.Token, cfg.Profile, cfg.CredsFilename)
|
||||
if creds == nil {
|
||||
t.Fatalf("Expected a static creds provider to be returned")
|
||||
creds, err := GetCredentials(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatal("Expected a static creds provider to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
|
@ -362,7 +417,14 @@ func TestAWSGetCredentials_shouldErrorWithInvalidEndpoint(t *testing.T) {
|
|||
ts := invalidAwsEnv(t)
|
||||
defer ts()
|
||||
|
||||
creds := GetCredentials("", "", "", "", "")
|
||||
creds, err := GetCredentials(&Config{})
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatal("Expected a static creds provider to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
if err == nil {
|
||||
t.Fatal("Expected error returned when getting creds w/ invalid EC2 endpoint")
|
||||
|
@ -380,11 +442,17 @@ func TestAWSGetCredentials_shouldIgnoreInvalidEndpoint(t *testing.T) {
|
|||
ts := invalidAwsEnv(t)
|
||||
defer ts()
|
||||
|
||||
creds := GetCredentials("accessKey", "secretKey", "", "", "")
|
||||
creds, err := GetCredentials(&Config{AccessKey: "accessKey", SecretKey: "secretKey"})
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
v, err := creds.Get()
|
||||
if err != nil {
|
||||
t.Fatalf("Getting static credentials w/ invalid EC2 endpoint failed: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatal("Expected a static creds provider to be returned")
|
||||
}
|
||||
|
||||
if v.ProviderName != "StaticProvider" {
|
||||
t.Fatalf("Expected provider name to be %q, %q given", "StaticProvider", v.ProviderName)
|
||||
|
@ -406,10 +474,14 @@ func TestAWSGetCredentials_shouldCatchEC2RoleProvider(t *testing.T) {
|
|||
ts := awsEnv(t)
|
||||
defer ts()
|
||||
|
||||
creds := GetCredentials("", "", "", "", "")
|
||||
if creds == nil {
|
||||
t.Fatalf("Expected an EC2Role creds provider to be returned")
|
||||
creds, err := GetCredentials(&Config{})
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatal("Expected an EC2Role creds provider to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
if err != nil {
|
||||
t.Fatalf("Expected no error when getting creds: %s", err)
|
||||
|
@ -452,10 +524,14 @@ func TestAWSGetCredentials_shouldBeShared(t *testing.T) {
|
|||
t.Fatalf("Error resetting env var AWS_SHARED_CREDENTIALS_FILE: %s", err)
|
||||
}
|
||||
|
||||
creds := GetCredentials("", "", "", "myprofile", file.Name())
|
||||
if creds == nil {
|
||||
t.Fatalf("Expected a provider chain to be returned")
|
||||
creds, err := GetCredentials(&Config{Profile: "myprofile", CredsFilename: file.Name()})
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatal("Expected a provider chain to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
|
@ -479,10 +555,14 @@ func TestAWSGetCredentials_shouldBeENV(t *testing.T) {
|
|||
defer resetEnv()
|
||||
|
||||
cfg := Config{}
|
||||
creds := GetCredentials(cfg.AccessKey, cfg.SecretKey, cfg.Token, cfg.Profile, cfg.CredsFilename)
|
||||
creds, err := GetCredentials(&cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
}
|
||||
if creds == nil {
|
||||
t.Fatalf("Expected a static creds provider to be returned")
|
||||
}
|
||||
|
||||
v, err := creds.Get()
|
||||
if err != nil {
|
||||
t.Fatalf("Error gettings creds: %s", err)
|
||||
|
@ -498,7 +578,7 @@ func TestAWSGetCredentials_shouldBeENV(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
// unsetEnv unsets enviornment variables for testing a "clean slate" with no
|
||||
// unsetEnv unsets environment variables for testing a "clean slate" with no
|
||||
// credentials in the environment
|
||||
func unsetEnv(t *testing.T) func() {
|
||||
// Grab any existing AWS keys and preserve. In some tests we'll unset these, so
|
||||
|
@ -648,12 +728,15 @@ func getMockedAwsIamStsApi(endpoints []*iamEndpoint) (func(), *iam.IAM, *sts.STS
|
|||
|
||||
sc := awsCredentials.NewStaticCredentials("accessKey", "secretKey", "")
|
||||
|
||||
sess := session.New(&aws.Config{
|
||||
sess, err := session.NewSession(&aws.Config{
|
||||
Credentials: sc,
|
||||
Region: aws.String("us-east-1"),
|
||||
Endpoint: aws.String(ts.URL),
|
||||
CredentialsChainVerboseErrors: aws.Bool(true),
|
||||
})
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("Error creating AWS Session: %s", err))
|
||||
}
|
||||
iamConn := iam.New(sess)
|
||||
stsConn := sts.New(sess)
|
||||
return ts.Close, iamConn, stsConn
|
||||
|
|
|
@ -4,6 +4,7 @@ import (
|
|||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"regexp"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/autoscaling"
|
||||
|
@ -119,13 +120,16 @@ func autoscalingTagsFromMap(m map[string]interface{}, resourceID string) []*auto
|
|||
result := make([]*autoscaling.Tag, 0, len(m))
|
||||
for k, v := range m {
|
||||
attr := v.(map[string]interface{})
|
||||
result = append(result, &autoscaling.Tag{
|
||||
t := &autoscaling.Tag{
|
||||
Key: aws.String(k),
|
||||
Value: aws.String(attr["value"].(string)),
|
||||
PropagateAtLaunch: aws.Bool(attr["propagate_at_launch"].(bool)),
|
||||
ResourceId: aws.String(resourceID),
|
||||
ResourceType: aws.String("auto-scaling-group"),
|
||||
})
|
||||
}
|
||||
if !tagIgnoredAutoscaling(t) {
|
||||
result = append(result, t)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
|
@ -182,3 +186,17 @@ func setToMapByKey(s *schema.Set, key string) map[string]interface{} {
|
|||
|
||||
return result
|
||||
}
|
||||
|
||||
// compare a tag against a list of strings and checks if it should
|
||||
// be ignored or not
|
||||
func tagIgnoredAutoscaling(t *autoscaling.Tag) bool {
|
||||
filter := []string{"^aws:*"}
|
||||
for _, v := range filter {
|
||||
log.Printf("[DEBUG] Matching %v with %v\n", v, *t.Key)
|
||||
if r, _ := regexp.MatchString(v, *t.Key); r == true {
|
||||
log.Printf("[DEBUG] Found AWS specific tag %s (val: %s), ignoring.\n", *t.Key, *t.Value)
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
|
|
@ -5,6 +5,7 @@ import (
|
|||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/autoscaling"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
|
@ -120,3 +121,20 @@ func testAccCheckAutoscalingTagNotExists(ts *[]*autoscaling.TagDescription, key
|
|||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func TestIgnoringTagsAutoscaling(t *testing.T) {
|
||||
var ignoredTags []*autoscaling.Tag
|
||||
ignoredTags = append(ignoredTags, &autoscaling.Tag{
|
||||
Key: aws.String("aws:cloudformation:logical-id"),
|
||||
Value: aws.String("foo"),
|
||||
})
|
||||
ignoredTags = append(ignoredTags, &autoscaling.Tag{
|
||||
Key: aws.String("aws:foo:bar"),
|
||||
Value: aws.String("baz"),
|
||||
})
|
||||
for _, tag := range ignoredTags {
|
||||
if !tagIgnoredAutoscaling(tag) {
|
||||
t.Fatalf("Tag %v with value %v not ignored, but should be!", *tag.Key, *tag.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws/awserr"
|
||||
)
|
||||
|
||||
func isAWSErr(err error, code string, message string) bool {
|
||||
if err, ok := err.(awserr.Error); ok {
|
||||
return err.Code() == code && strings.Contains(err.Message(), message)
|
||||
}
|
||||
return false
|
||||
}
|
|
@ -11,6 +11,7 @@ import (
|
|||
"bytes"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
|
@ -25,6 +26,14 @@ import (
|
|||
// is used to set the zone_id attribute.
|
||||
const cloudFrontRoute53ZoneID = "Z2FDTNDATAQYW2"
|
||||
|
||||
// Define Sort interface for []*string so we can ensure the order of
|
||||
// geo_restrictions.locations
|
||||
type StringPtrSlice []*string
|
||||
|
||||
func (p StringPtrSlice) Len() int { return len(p) }
|
||||
func (p StringPtrSlice) Less(i, j int) bool { return *p[i] < *p[j] }
|
||||
func (p StringPtrSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
|
||||
|
||||
// Assemble the *cloudfront.DistributionConfig variable. Calls out to various
|
||||
// expander functions to convert attributes and sub-attributes to the various
|
||||
// complex structures which are necessary to properly build the
|
||||
|
@ -37,6 +46,7 @@ func expandDistributionConfig(d *schema.ResourceData) *cloudfront.DistributionCo
|
|||
CustomErrorResponses: expandCustomErrorResponses(d.Get("custom_error_response").(*schema.Set)),
|
||||
DefaultCacheBehavior: expandDefaultCacheBehavior(d.Get("default_cache_behavior").(*schema.Set).List()[0].(map[string]interface{})),
|
||||
Enabled: aws.Bool(d.Get("enabled").(bool)),
|
||||
HttpVersion: aws.String(d.Get("http_version").(string)),
|
||||
Origins: expandOrigins(d.Get("origin").(*schema.Set)),
|
||||
PriceClass: aws.String(d.Get("price_class").(string)),
|
||||
}
|
||||
|
@ -77,6 +87,7 @@ func expandDistributionConfig(d *schema.ResourceData) *cloudfront.DistributionCo
|
|||
} else {
|
||||
distributionConfig.WebACLId = aws.String("")
|
||||
}
|
||||
|
||||
return distributionConfig
|
||||
}
|
||||
|
||||
|
@ -113,6 +124,9 @@ func flattenDistributionConfig(d *schema.ResourceData, distributionConfig *cloud
|
|||
if distributionConfig.DefaultRootObject != nil {
|
||||
d.Set("default_root_object", distributionConfig.DefaultRootObject)
|
||||
}
|
||||
if distributionConfig.HttpVersion != nil {
|
||||
d.Set("http_version", distributionConfig.HttpVersion)
|
||||
}
|
||||
if distributionConfig.WebACLId != nil {
|
||||
d.Set("web_acl_id", distributionConfig.WebACLId)
|
||||
}
|
||||
|
@ -369,6 +383,9 @@ func expandForwardedValues(m map[string]interface{}) *cloudfront.ForwardedValues
|
|||
if v, ok := m["headers"]; ok {
|
||||
fv.Headers = expandHeaders(v.([]interface{}))
|
||||
}
|
||||
if v, ok := m["query_string_cache_keys"]; ok {
|
||||
fv.QueryStringCacheKeys = expandQueryStringCacheKeys(v.([]interface{}))
|
||||
}
|
||||
return fv
|
||||
}
|
||||
|
||||
|
@ -381,6 +398,9 @@ func flattenForwardedValues(fv *cloudfront.ForwardedValues) map[string]interface
|
|||
if fv.Headers != nil {
|
||||
m["headers"] = flattenHeaders(fv.Headers)
|
||||
}
|
||||
if fv.QueryStringCacheKeys != nil {
|
||||
m["query_string_cache_keys"] = flattenQueryStringCacheKeys(fv.QueryStringCacheKeys)
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
|
@ -398,6 +418,11 @@ func forwardedValuesHash(v interface{}) int {
|
|||
buf.WriteString(fmt.Sprintf("%s-", e.(string)))
|
||||
}
|
||||
}
|
||||
if d, ok := m["query_string_cache_keys"]; ok {
|
||||
for _, e := range sortInterfaceSlice(d.([]interface{})) {
|
||||
buf.WriteString(fmt.Sprintf("%s-", e.(string)))
|
||||
}
|
||||
}
|
||||
return hashcode.String(buf.String())
|
||||
}
|
||||
|
||||
|
@ -415,6 +440,20 @@ func flattenHeaders(h *cloudfront.Headers) []interface{} {
|
|||
return []interface{}{}
|
||||
}
|
||||
|
||||
func expandQueryStringCacheKeys(d []interface{}) *cloudfront.QueryStringCacheKeys {
|
||||
return &cloudfront.QueryStringCacheKeys{
|
||||
Quantity: aws.Int64(int64(len(d))),
|
||||
Items: expandStringList(d),
|
||||
}
|
||||
}
|
||||
|
||||
func flattenQueryStringCacheKeys(k *cloudfront.QueryStringCacheKeys) []interface{} {
|
||||
if k.Items != nil {
|
||||
return flattenStringList(k.Items)
|
||||
}
|
||||
return []interface{}{}
|
||||
}
|
||||
|
||||
func expandCookiePreference(m map[string]interface{}) *cloudfront.CookiePreference {
|
||||
cp := &cloudfront.CookiePreference{
|
||||
Forward: aws.String(m["forward"].(string)),
|
||||
|
@ -873,6 +912,7 @@ func expandGeoRestriction(m map[string]interface{}) *cloudfront.GeoRestriction {
|
|||
if v, ok := m["locations"]; ok {
|
||||
gr.Quantity = aws.Int64(int64(len(v.([]interface{}))))
|
||||
gr.Items = expandStringList(v.([]interface{}))
|
||||
sort.Sort(StringPtrSlice(gr.Items))
|
||||
} else {
|
||||
gr.Quantity = aws.Int64(0)
|
||||
}
|
||||
|
@ -884,6 +924,7 @@ func flattenGeoRestriction(gr *cloudfront.GeoRestriction) map[string]interface{}
|
|||
|
||||
m["restriction_type"] = *gr.RestrictionType
|
||||
if gr.Items != nil {
|
||||
sort.Sort(StringPtrSlice(gr.Items))
|
||||
m["locations"] = flattenStringList(gr.Items)
|
||||
}
|
||||
return m
|
||||
|
|
|
@ -46,9 +46,10 @@ func trustedSignersConf() []interface{} {
|
|||
|
||||
func forwardedValuesConf() map[string]interface{} {
|
||||
return map[string]interface{}{
|
||||
"query_string": true,
|
||||
"cookies": schema.NewSet(cookiePreferenceHash, []interface{}{cookiePreferenceConf()}),
|
||||
"headers": headersConf(),
|
||||
"query_string": true,
|
||||
"query_string_cache_keys": queryStringCacheKeysConf(),
|
||||
"cookies": schema.NewSet(cookiePreferenceHash, []interface{}{cookiePreferenceConf()}),
|
||||
"headers": headersConf(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -56,6 +57,10 @@ func headersConf() []interface{} {
|
|||
return []interface{}{"X-Example1", "X-Example2"}
|
||||
}
|
||||
|
||||
func queryStringCacheKeysConf() []interface{} {
|
||||
return []interface{}{"foo", "bar"}
|
||||
}
|
||||
|
||||
func cookiePreferenceConf() map[string]interface{} {
|
||||
return map[string]interface{}{
|
||||
"forward": "whitelist",
|
||||
|
@ -473,6 +478,27 @@ func TestCloudFrontStructure_flattenHeaders(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestCloudFrontStructure_expandQueryStringCacheKeys(t *testing.T) {
|
||||
data := queryStringCacheKeysConf()
|
||||
k := expandQueryStringCacheKeys(data)
|
||||
if *k.Quantity != 2 {
|
||||
t.Fatalf("Expected Quantity to be 2, got %v", *k.Quantity)
|
||||
}
|
||||
if reflect.DeepEqual(k.Items, expandStringList(data)) != true {
|
||||
t.Fatalf("Expected Items to be %v, got %v", data, k.Items)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCloudFrontStructure_flattenQueryStringCacheKeys(t *testing.T) {
|
||||
in := queryStringCacheKeysConf()
|
||||
k := expandQueryStringCacheKeys(in)
|
||||
out := flattenQueryStringCacheKeys(k)
|
||||
|
||||
if reflect.DeepEqual(in, out) != true {
|
||||
t.Fatalf("Expected out to be %v, got %v", in, out)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCloudFrontStructure_expandCookiePreference(t *testing.T) {
|
||||
data := cookiePreferenceConf()
|
||||
cp := expandCookiePreference(data)
|
||||
|
|
|
@ -1,23 +1,22 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/go-cleanhttp"
|
||||
"github.com/hashicorp/go-multierror"
|
||||
"github.com/hashicorp/terraform/helper/logging"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
|
||||
"crypto/tls"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/awserr"
|
||||
"github.com/aws/aws-sdk-go/aws/request"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/acm"
|
||||
"github.com/aws/aws-sdk-go/service/apigateway"
|
||||
"github.com/aws/aws-sdk-go/service/applicationautoscaling"
|
||||
"github.com/aws/aws-sdk-go/service/autoscaling"
|
||||
"github.com/aws/aws-sdk-go/service/cloudformation"
|
||||
"github.com/aws/aws-sdk-go/service/cloudfront"
|
||||
|
@ -38,6 +37,7 @@ import (
|
|||
elasticsearch "github.com/aws/aws-sdk-go/service/elasticsearchservice"
|
||||
"github.com/aws/aws-sdk-go/service/elastictranscoder"
|
||||
"github.com/aws/aws-sdk-go/service/elb"
|
||||
"github.com/aws/aws-sdk-go/service/elbv2"
|
||||
"github.com/aws/aws-sdk-go/service/emr"
|
||||
"github.com/aws/aws-sdk-go/service/firehose"
|
||||
"github.com/aws/aws-sdk-go/service/glacier"
|
||||
|
@ -50,9 +50,18 @@ import (
|
|||
"github.com/aws/aws-sdk-go/service/redshift"
|
||||
"github.com/aws/aws-sdk-go/service/route53"
|
||||
"github.com/aws/aws-sdk-go/service/s3"
|
||||
"github.com/aws/aws-sdk-go/service/ses"
|
||||
"github.com/aws/aws-sdk-go/service/simpledb"
|
||||
"github.com/aws/aws-sdk-go/service/sns"
|
||||
"github.com/aws/aws-sdk-go/service/sqs"
|
||||
"github.com/aws/aws-sdk-go/service/ssm"
|
||||
"github.com/aws/aws-sdk-go/service/sts"
|
||||
"github.com/aws/aws-sdk-go/service/waf"
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"github.com/hashicorp/errwrap"
|
||||
"github.com/hashicorp/go-cleanhttp"
|
||||
"github.com/hashicorp/terraform/helper/logging"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
|
@ -64,6 +73,10 @@ type Config struct {
|
|||
Region string
|
||||
MaxRetries int
|
||||
|
||||
AssumeRoleARN string
|
||||
AssumeRoleExternalID string
|
||||
AssumeRoleSessionName string
|
||||
|
||||
AllowedAccountIds []interface{}
|
||||
ForbiddenAccountIds []interface{}
|
||||
|
||||
|
@ -72,7 +85,13 @@ type Config struct {
|
|||
Ec2Endpoint string
|
||||
IamEndpoint string
|
||||
ElbEndpoint string
|
||||
S3Endpoint string
|
||||
Insecure bool
|
||||
|
||||
SkipCredsValidation bool
|
||||
SkipRequestingAccountId bool
|
||||
SkipMetadataApiCheck bool
|
||||
S3ForcePathStyle bool
|
||||
}
|
||||
|
||||
type AWSClient struct {
|
||||
|
@ -89,16 +108,22 @@ type AWSClient struct {
|
|||
ecsconn *ecs.ECS
|
||||
efsconn *efs.EFS
|
||||
elbconn *elb.ELB
|
||||
elbv2conn *elbv2.ELBV2
|
||||
emrconn *emr.EMR
|
||||
esconn *elasticsearch.ElasticsearchService
|
||||
acmconn *acm.ACM
|
||||
apigateway *apigateway.APIGateway
|
||||
appautoscalingconn *applicationautoscaling.ApplicationAutoScaling
|
||||
autoscalingconn *autoscaling.AutoScaling
|
||||
s3conn *s3.S3
|
||||
sesConn *ses.SES
|
||||
simpledbconn *simpledb.SimpleDB
|
||||
sqsconn *sqs.SQS
|
||||
snsconn *sns.SNS
|
||||
stsconn *sts.STS
|
||||
redshiftconn *redshift.Redshift
|
||||
r53conn *route53.Route53
|
||||
partition string
|
||||
accountid string
|
||||
region string
|
||||
rdsconn *rds.RDS
|
||||
|
@ -114,218 +139,183 @@ type AWSClient struct {
|
|||
glacierconn *glacier.Glacier
|
||||
codedeployconn *codedeploy.CodeDeploy
|
||||
codecommitconn *codecommit.CodeCommit
|
||||
ssmconn *ssm.SSM
|
||||
wafconn *waf.WAF
|
||||
}
|
||||
|
||||
// Client configures and returns a fully initialized AWSClient
|
||||
func (c *Config) Client() (interface{}, error) {
|
||||
// Get the auth and region. This can fail if keys/regions were not
|
||||
// specified and we're attempting to use the environment.
|
||||
var errs []error
|
||||
|
||||
log.Println("[INFO] Building AWS region structure")
|
||||
err := c.ValidateRegion()
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var client AWSClient
|
||||
if len(errs) == 0 {
|
||||
// store AWS region in client struct, for region specific operations such as
|
||||
// bucket storage in S3
|
||||
client.region = c.Region
|
||||
// store AWS region in client struct, for region specific operations such as
|
||||
// bucket storage in S3
|
||||
client.region = c.Region
|
||||
|
||||
log.Println("[INFO] Building AWS auth structure")
|
||||
creds := GetCredentials(c.AccessKey, c.SecretKey, c.Token, c.Profile, c.CredsFilename)
|
||||
// Call Get to check for credential provider. If nothing found, we'll get an
|
||||
// error, and we can present it nicely to the user
|
||||
cp, err := creds.Get()
|
||||
if err != nil {
|
||||
if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == "NoCredentialProviders" {
|
||||
errs = append(errs, fmt.Errorf(`No valid credential sources found for AWS Provider.
|
||||
log.Println("[INFO] Building AWS auth structure")
|
||||
creds, err := GetCredentials(c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Call Get to check for credential provider. If nothing found, we'll get an
|
||||
// error, and we can present it nicely to the user
|
||||
cp, err := creds.Get()
|
||||
if err != nil {
|
||||
if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == "NoCredentialProviders" {
|
||||
return nil, errors.New(`No valid credential sources found for AWS Provider.
|
||||
Please see https://terraform.io/docs/providers/aws/index.html for more information on
|
||||
providing credentials for the AWS Provider`))
|
||||
} else {
|
||||
errs = append(errs, fmt.Errorf("Error loading credentials for AWS Provider: %s", err))
|
||||
}
|
||||
return nil, &multierror.Error{Errors: errs}
|
||||
providing credentials for the AWS Provider`)
|
||||
}
|
||||
|
||||
log.Printf("[INFO] AWS Auth provider used: %q", cp.ProviderName)
|
||||
return nil, fmt.Errorf("Error loading credentials for AWS Provider: %s", err)
|
||||
}
|
||||
|
||||
awsConfig := &aws.Config{
|
||||
Credentials: creds,
|
||||
Region: aws.String(c.Region),
|
||||
MaxRetries: aws.Int(c.MaxRetries),
|
||||
HTTPClient: cleanhttp.DefaultClient(),
|
||||
log.Printf("[INFO] AWS Auth provider used: %q", cp.ProviderName)
|
||||
|
||||
awsConfig := &aws.Config{
|
||||
Credentials: creds,
|
||||
Region: aws.String(c.Region),
|
||||
MaxRetries: aws.Int(c.MaxRetries),
|
||||
HTTPClient: cleanhttp.DefaultClient(),
|
||||
S3ForcePathStyle: aws.Bool(c.S3ForcePathStyle),
|
||||
}
|
||||
|
||||
if logging.IsDebugOrHigher() {
|
||||
awsConfig.LogLevel = aws.LogLevel(aws.LogDebugWithHTTPBody)
|
||||
awsConfig.Logger = awsLogger{}
|
||||
}
|
||||
|
||||
if c.Insecure {
|
||||
transport := awsConfig.HTTPClient.Transport.(*http.Transport)
|
||||
transport.TLSClientConfig = &tls.Config{
|
||||
InsecureSkipVerify: true,
|
||||
}
|
||||
}
|
||||
|
||||
if logging.IsDebugOrHigher() {
|
||||
awsConfig.LogLevel = aws.LogLevel(aws.LogDebugWithHTTPBody)
|
||||
awsConfig.Logger = awsLogger{}
|
||||
}
|
||||
// Set up base session
|
||||
sess, err := session.NewSession(awsConfig)
|
||||
if err != nil {
|
||||
return nil, errwrap.Wrapf("Error creating AWS session: {{err}}", err)
|
||||
}
|
||||
|
||||
if c.Insecure {
|
||||
transport := awsConfig.HTTPClient.Transport.(*http.Transport)
|
||||
transport.TLSClientConfig = &tls.Config{
|
||||
InsecureSkipVerify: true,
|
||||
}
|
||||
}
|
||||
// Removes the SDK Version handler, so we only have the provider User-Agent
|
||||
// Ex: "User-Agent: APN/1.0 HashiCorp/1.0 Terraform/0.7.9-dev"
|
||||
sess.Handlers.Build.Remove(request.NamedHandler{Name: "core.SDKVersionUserAgentHandler"})
|
||||
sess.Handlers.Build.PushFrontNamed(addTerraformVersionToUserAgent)
|
||||
|
||||
// Set up base session
|
||||
sess := session.New(awsConfig)
|
||||
sess.Handlers.Build.PushFrontNamed(addTerraformVersionToUserAgent)
|
||||
if extraDebug := os.Getenv("TERRAFORM_AWS_AUTHFAILURE_DEBUG"); extraDebug != "" {
|
||||
sess.Handlers.UnmarshalError.PushFrontNamed(debugAuthFailure)
|
||||
}
|
||||
|
||||
log.Println("[INFO] Initializing IAM Connection")
|
||||
awsIamSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.IamEndpoint)})
|
||||
client.iamconn = iam.New(awsIamSess)
|
||||
// Some services exist only in us-east-1, e.g. because they manage
|
||||
// resources that can span across multiple regions, or because
|
||||
// signature format v4 requires region to be us-east-1 for global
|
||||
// endpoints:
|
||||
// http://docs.aws.amazon.com/general/latest/gr/sigv4_changes.html
|
||||
usEast1Sess := sess.Copy(&aws.Config{Region: aws.String("us-east-1")})
|
||||
|
||||
log.Println("[INFO] Initializing STS connection")
|
||||
client.stsconn = sts.New(sess)
|
||||
// Some services have user-configurable endpoints
|
||||
awsEc2Sess := sess.Copy(&aws.Config{Endpoint: aws.String(c.Ec2Endpoint)})
|
||||
awsElbSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.ElbEndpoint)})
|
||||
awsIamSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.IamEndpoint)})
|
||||
awsS3Sess := sess.Copy(&aws.Config{Endpoint: aws.String(c.S3Endpoint)})
|
||||
dynamoSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.DynamoDBEndpoint)})
|
||||
kinesisSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.KinesisEndpoint)})
|
||||
|
||||
err = c.ValidateCredentials(client.iamconn)
|
||||
// These two services need to be set up early so we can check on AccountID
|
||||
client.iamconn = iam.New(awsIamSess)
|
||||
client.stsconn = sts.New(sess)
|
||||
|
||||
if !c.SkipCredsValidation {
|
||||
err = c.ValidateCredentials(client.stsconn)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
return nil, &multierror.Error{Errors: errs}
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Some services exist only in us-east-1, e.g. because they manage
|
||||
// resources that can span across multiple regions, or because
|
||||
// signature format v4 requires region to be us-east-1 for global
|
||||
// endpoints:
|
||||
// http://docs.aws.amazon.com/general/latest/gr/sigv4_changes.html
|
||||
usEast1Sess := sess.Copy(&aws.Config{Region: aws.String("us-east-1")})
|
||||
|
||||
accountId, err := GetAccountId(client.iamconn, client.stsconn, cp.ProviderName)
|
||||
if !c.SkipRequestingAccountId {
|
||||
partition, accountId, err := GetAccountInfo(client.iamconn, client.stsconn, cp.ProviderName)
|
||||
if err == nil {
|
||||
client.partition = partition
|
||||
client.accountid = accountId
|
||||
}
|
||||
|
||||
log.Println("[INFO] Initializing DynamoDB connection")
|
||||
dynamoSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.DynamoDBEndpoint)})
|
||||
client.dynamodbconn = dynamodb.New(dynamoSess)
|
||||
|
||||
log.Println("[INFO] Initializing Cloudfront connection")
|
||||
client.cloudfrontconn = cloudfront.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing ELB connection")
|
||||
awsElbSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.ElbEndpoint)})
|
||||
client.elbconn = elb.New(awsElbSess)
|
||||
|
||||
log.Println("[INFO] Initializing S3 connection")
|
||||
client.s3conn = s3.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing SQS connection")
|
||||
client.sqsconn = sqs.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing SNS connection")
|
||||
client.snsconn = sns.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing RDS Connection")
|
||||
client.rdsconn = rds.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing Kinesis Connection")
|
||||
kinesisSess := sess.Copy(&aws.Config{Endpoint: aws.String(c.KinesisEndpoint)})
|
||||
client.kinesisconn = kinesis.New(kinesisSess)
|
||||
|
||||
log.Println("[INFO] Initializing Elastic Beanstalk Connection")
|
||||
client.elasticbeanstalkconn = elasticbeanstalk.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing Elastic Transcoder Connection")
|
||||
client.elastictranscoderconn = elastictranscoder.New(sess)
|
||||
|
||||
authErr := c.ValidateAccountId(client.accountid)
|
||||
if authErr != nil {
|
||||
errs = append(errs, authErr)
|
||||
}
|
||||
|
||||
log.Println("[INFO] Initializing Kinesis Firehose Connection")
|
||||
client.firehoseconn = firehose.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing AutoScaling connection")
|
||||
client.autoscalingconn = autoscaling.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing EC2 Connection")
|
||||
|
||||
awsEc2Sess := sess.Copy(&aws.Config{Endpoint: aws.String(c.Ec2Endpoint)})
|
||||
client.ec2conn = ec2.New(awsEc2Sess)
|
||||
|
||||
log.Println("[INFO] Initializing ECR Connection")
|
||||
client.ecrconn = ecr.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing API Gateway")
|
||||
client.apigateway = apigateway.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing ECS Connection")
|
||||
client.ecsconn = ecs.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing EFS Connection")
|
||||
client.efsconn = efs.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing ElasticSearch Connection")
|
||||
client.esconn = elasticsearch.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing EMR Connection")
|
||||
client.emrconn = emr.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing Route 53 connection")
|
||||
client.r53conn = route53.New(usEast1Sess)
|
||||
|
||||
log.Println("[INFO] Initializing Elasticache Connection")
|
||||
client.elasticacheconn = elasticache.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing Lambda Connection")
|
||||
client.lambdaconn = lambda.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing Cloudformation Connection")
|
||||
client.cfconn = cloudformation.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing CloudWatch SDK connection")
|
||||
client.cloudwatchconn = cloudwatch.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing CloudWatch Events connection")
|
||||
client.cloudwatcheventsconn = cloudwatchevents.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing CloudTrail connection")
|
||||
client.cloudtrailconn = cloudtrail.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing CloudWatch Logs connection")
|
||||
client.cloudwatchlogsconn = cloudwatchlogs.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing OpsWorks Connection")
|
||||
client.opsworksconn = opsworks.New(usEast1Sess)
|
||||
|
||||
log.Println("[INFO] Initializing Directory Service connection")
|
||||
client.dsconn = directoryservice.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing Glacier connection")
|
||||
client.glacierconn = glacier.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing CodeDeploy Connection")
|
||||
client.codedeployconn = codedeploy.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing CodeCommit SDK connection")
|
||||
client.codecommitconn = codecommit.New(usEast1Sess)
|
||||
|
||||
log.Println("[INFO] Initializing Redshift SDK connection")
|
||||
client.redshiftconn = redshift.New(sess)
|
||||
|
||||
log.Println("[INFO] Initializing KMS connection")
|
||||
client.kmsconn = kms.New(sess)
|
||||
}
|
||||
|
||||
if len(errs) > 0 {
|
||||
return nil, &multierror.Error{Errors: errs}
|
||||
authErr := c.ValidateAccountId(client.accountid)
|
||||
if authErr != nil {
|
||||
return nil, authErr
|
||||
}
|
||||
|
||||
client.acmconn = acm.New(sess)
|
||||
client.apigateway = apigateway.New(sess)
|
||||
client.appautoscalingconn = applicationautoscaling.New(sess)
|
||||
client.autoscalingconn = autoscaling.New(sess)
|
||||
client.cfconn = cloudformation.New(sess)
|
||||
client.cloudfrontconn = cloudfront.New(sess)
|
||||
client.cloudtrailconn = cloudtrail.New(sess)
|
||||
client.cloudwatchconn = cloudwatch.New(sess)
|
||||
client.cloudwatcheventsconn = cloudwatchevents.New(sess)
|
||||
client.cloudwatchlogsconn = cloudwatchlogs.New(sess)
|
||||
client.codecommitconn = codecommit.New(usEast1Sess)
|
||||
client.codedeployconn = codedeploy.New(sess)
|
||||
client.dsconn = directoryservice.New(sess)
|
||||
client.dynamodbconn = dynamodb.New(dynamoSess)
|
||||
client.ec2conn = ec2.New(awsEc2Sess)
|
||||
client.ecrconn = ecr.New(sess)
|
||||
client.ecsconn = ecs.New(sess)
|
||||
client.efsconn = efs.New(sess)
|
||||
client.elasticacheconn = elasticache.New(sess)
|
||||
client.elasticbeanstalkconn = elasticbeanstalk.New(sess)
|
||||
client.elastictranscoderconn = elastictranscoder.New(sess)
|
||||
client.elbconn = elb.New(awsElbSess)
|
||||
client.elbv2conn = elbv2.New(awsElbSess)
|
||||
client.emrconn = emr.New(sess)
|
||||
client.esconn = elasticsearch.New(sess)
|
||||
client.firehoseconn = firehose.New(sess)
|
||||
client.glacierconn = glacier.New(sess)
|
||||
client.kinesisconn = kinesis.New(kinesisSess)
|
||||
client.kmsconn = kms.New(sess)
|
||||
client.lambdaconn = lambda.New(sess)
|
||||
client.opsworksconn = opsworks.New(usEast1Sess)
|
||||
client.r53conn = route53.New(usEast1Sess)
|
||||
client.rdsconn = rds.New(sess)
|
||||
client.redshiftconn = redshift.New(sess)
|
||||
client.simpledbconn = simpledb.New(sess)
|
||||
client.s3conn = s3.New(awsS3Sess)
|
||||
client.sesConn = ses.New(sess)
|
||||
client.snsconn = sns.New(sess)
|
||||
client.sqsconn = sqs.New(sess)
|
||||
client.ssmconn = ssm.New(sess)
|
||||
client.wafconn = waf.New(sess)
|
||||
|
||||
return &client, nil
|
||||
}
|
||||
|
||||
// ValidateRegion returns an error if the configured region is not a
|
||||
// valid aws region and nil otherwise.
|
||||
func (c *Config) ValidateRegion() error {
|
||||
var regions = [12]string{"us-east-1", "us-west-2", "us-west-1", "eu-west-1",
|
||||
"eu-central-1", "ap-southeast-1", "ap-southeast-2", "ap-northeast-1",
|
||||
"ap-northeast-2", "sa-east-1", "cn-north-1", "us-gov-west-1"}
|
||||
var regions = []string{
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"cn-north-1",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-gov-west-1",
|
||||
"us-west-1",
|
||||
"us-west-2",
|
||||
}
|
||||
|
||||
for _, valid := range regions {
|
||||
if c.Region == valid {
|
||||
|
@ -336,24 +326,8 @@ func (c *Config) ValidateRegion() error {
|
|||
}
|
||||
|
||||
// Validate credentials early and fail before we do any graph walking.
|
||||
// In the case of an IAM role/profile with insuffecient privileges, fail
|
||||
// silently
|
||||
func (c *Config) ValidateCredentials(iamconn *iam.IAM) error {
|
||||
_, err := iamconn.GetUser(nil)
|
||||
|
||||
if awsErr, ok := err.(awserr.Error); ok {
|
||||
if awsErr.Code() == "AccessDenied" || awsErr.Code() == "ValidationError" {
|
||||
log.Printf("[WARN] AccessDenied Error with iam.GetUser, assuming IAM role")
|
||||
// User may be an IAM instance profile, or otherwise IAM role without the
|
||||
// GetUser permissions, so fail silently
|
||||
return nil
|
||||
}
|
||||
|
||||
if awsErr.Code() == "SignatureDoesNotMatch" {
|
||||
return fmt.Errorf("Failed authenticating with AWS: please verify credentials")
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Config) ValidateCredentials(stsconn *sts.STS) error {
|
||||
_, err := stsconn.GetCallerIdentity(&sts.GetCallerIdentityInput{})
|
||||
return err
|
||||
}
|
||||
|
||||
|
@ -364,7 +338,7 @@ func (c *Config) ValidateAccountId(accountId string) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
log.Printf("[INFO] Validating account ID")
|
||||
log.Println("[INFO] Validating account ID")
|
||||
|
||||
if c.ForbiddenAccountIds != nil {
|
||||
for _, id := range c.ForbiddenAccountIds {
|
||||
|
@ -391,7 +365,18 @@ func (c *Config) ValidateAccountId(accountId string) error {
|
|||
var addTerraformVersionToUserAgent = request.NamedHandler{
|
||||
Name: "terraform.TerraformVersionUserAgentHandler",
|
||||
Fn: request.MakeAddToUserAgentHandler(
|
||||
"terraform", terraform.Version, terraform.VersionPrerelease),
|
||||
"APN/1.0 HashiCorp/1.0 Terraform", terraform.VersionString()),
|
||||
}
|
||||
|
||||
var debugAuthFailure = request.NamedHandler{
|
||||
Name: "terraform.AuthFailureAdditionalDebugHandler",
|
||||
Fn: func(req *request.Request) {
|
||||
if isAWSErr(req.Error, "AuthFailure", "AWS was not able to validate the provided access credentials") {
|
||||
log.Printf("[INFO] Additional AuthFailure Debugging Context")
|
||||
log.Printf("[INFO] Current system UTC time: %s", time.Now().UTC())
|
||||
log.Printf("[INFO] Request object: %s", spew.Sdump(req))
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
type awsLogger struct{}
|
||||
|
|
|
@ -1,52 +0,0 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsAvailabilityZones() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsAvailabilityZonesRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"instance": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsAvailabilityZonesRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
|
||||
log.Printf("[DEBUG] Reading availability zones")
|
||||
d.SetId(time.Now().UTC().String())
|
||||
|
||||
req := &ec2.DescribeAvailabilityZonesInput{DryRun: aws.Bool(false)}
|
||||
azresp, err := conn.DescribeAvailabilityZones(req)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error listing availability zones: %s", err)
|
||||
}
|
||||
|
||||
raw := make([]string, len(azresp.AvailabilityZones))
|
||||
for i, v := range azresp.AvailabilityZones {
|
||||
raw[i] = *v.ZoneName
|
||||
}
|
||||
|
||||
sort.Strings(raw)
|
||||
|
||||
if err := d.Set("instance", raw); err != nil {
|
||||
return fmt.Errorf("[WARN] Error setting availability zones")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -1,80 +0,0 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccAWSAvailabilityZones_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsAvailabilityZonesConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsAvailabilityZonesMeta("data.aws_availability_zones.availability_zones"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccCheckAwsAvailabilityZonesMeta(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[n]
|
||||
if !ok {
|
||||
return fmt.Errorf("Can't find AZ resource: %s", n)
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("AZ resource ID not set")
|
||||
}
|
||||
|
||||
actual, err := testAccCheckAwsAvailabilityZonesBuildAvailable(rs.Primary.Attributes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
expected := actual
|
||||
sort.Strings(expected)
|
||||
if reflect.DeepEqual(expected, actual) != true {
|
||||
return fmt.Errorf("AZs not sorted - expected %v, got %v", expected, actual)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func testAccCheckAwsAvailabilityZonesBuildAvailable(attrs map[string]string) ([]string, error) {
|
||||
v, ok := attrs["instance.#"]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("Available AZ list is missing")
|
||||
}
|
||||
qty, err := strconv.Atoi(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if qty < 1 {
|
||||
return nil, fmt.Errorf("No AZs found in region, this is probably a bug.")
|
||||
}
|
||||
zones := make([]string, qty)
|
||||
for n := range zones {
|
||||
zone, ok := attrs["instance."+strconv.Itoa(n)]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("AZ list corrupt, this is definitely a bug")
|
||||
}
|
||||
zones[n] = zone
|
||||
}
|
||||
return zones, nil
|
||||
}
|
||||
|
||||
const testAccCheckAwsAvailabilityZonesConfig = `
|
||||
data "aws_availability_zones" "availability_zones" {
|
||||
}
|
||||
`
|
|
@ -0,0 +1,73 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/acm"
|
||||
"github.com/hashicorp/errwrap"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsAcmCertificate() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsAcmCertificateRead,
|
||||
Schema: map[string]*schema.Schema{
|
||||
"domain": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"arn": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"statuses": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsAcmCertificateRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).acmconn
|
||||
params := &acm.ListCertificatesInput{}
|
||||
|
||||
target := d.Get("domain")
|
||||
|
||||
statuses, ok := d.GetOk("statuses")
|
||||
if ok {
|
||||
statusStrings := statuses.([]interface{})
|
||||
params.CertificateStatuses = expandStringList(statusStrings)
|
||||
} else {
|
||||
params.CertificateStatuses = []*string{aws.String("ISSUED")}
|
||||
}
|
||||
|
||||
var arns []string
|
||||
err := conn.ListCertificatesPages(params, func(page *acm.ListCertificatesOutput, lastPage bool) bool {
|
||||
for _, cert := range page.CertificateSummaryList {
|
||||
if *cert.DomainName == target {
|
||||
arns = append(arns, *cert.CertificateArn)
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
if err != nil {
|
||||
return errwrap.Wrapf("Error describing certificates: {{err}}", err)
|
||||
}
|
||||
|
||||
if len(arns) == 0 {
|
||||
return fmt.Errorf("No certificate for domain %q found in this region.", target)
|
||||
}
|
||||
if len(arns) > 1 {
|
||||
return fmt.Errorf("Multiple certificates for domain %q found in this region.", target)
|
||||
}
|
||||
|
||||
d.SetId(time.Now().UTC().String())
|
||||
d.Set("arn", arns[0])
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAwsAcmCertificateDataSource_noMatchReturnsError(t *testing.T) {
|
||||
domain := "hashicorp.com"
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() {
|
||||
testAccPreCheck(t)
|
||||
},
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccCheckAwsAcmCertificateDataSourceConfig(domain),
|
||||
ExpectError: regexp.MustCompile(`No certificate for domain`),
|
||||
},
|
||||
{
|
||||
Config: testAccCheckAwsAcmCertificateDataSourceConfigWithStatus(domain),
|
||||
ExpectError: regexp.MustCompile(`No certificate for domain`),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccCheckAwsAcmCertificateDataSourceConfig(domain string) string {
|
||||
return fmt.Sprintf(`
|
||||
data "aws_acm_certificate" "test" {
|
||||
domain = "%s"
|
||||
}
|
||||
`, domain)
|
||||
}
|
||||
|
||||
func testAccCheckAwsAcmCertificateDataSourceConfigWithStatus(domain string) string {
|
||||
return fmt.Sprintf(`
|
||||
data "aws_acm_certificate" "test" {
|
||||
domain = "%s"
|
||||
statuses = ["ISSUED"]
|
||||
}
|
||||
`, domain)
|
||||
}
|
|
@ -0,0 +1,127 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/elbv2"
|
||||
"github.com/hashicorp/errwrap"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsAlb() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsAlbRead,
|
||||
Schema: map[string]*schema.Schema{
|
||||
"arn": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"arn_suffix": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"name": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"internal": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"security_groups": {
|
||||
Type: schema.TypeSet,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Computed: true,
|
||||
Set: schema.HashString,
|
||||
},
|
||||
|
||||
"subnets": {
|
||||
Type: schema.TypeSet,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Computed: true,
|
||||
Set: schema.HashString,
|
||||
},
|
||||
|
||||
"access_logs": {
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
MaxItems: 1,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"bucket": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"prefix": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"enabled": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
"enable_deletion_protection": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"idle_timeout": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"vpc_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"zone_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"dns_name": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"tags": tagsSchemaComputed(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsAlbRead(d *schema.ResourceData, meta interface{}) error {
|
||||
elbconn := meta.(*AWSClient).elbv2conn
|
||||
albArn := d.Get("arn").(string)
|
||||
albName := d.Get("name").(string)
|
||||
|
||||
describeAlbOpts := &elbv2.DescribeLoadBalancersInput{}
|
||||
switch {
|
||||
case albArn != "":
|
||||
describeAlbOpts.LoadBalancerArns = []*string{aws.String(albArn)}
|
||||
case albName != "":
|
||||
describeAlbOpts.Names = []*string{aws.String(albName)}
|
||||
}
|
||||
|
||||
describeResp, err := elbconn.DescribeLoadBalancers(describeAlbOpts)
|
||||
if err != nil {
|
||||
return errwrap.Wrapf("Error retrieving ALB: {{err}}", err)
|
||||
}
|
||||
if len(describeResp.LoadBalancers) != 1 {
|
||||
return fmt.Errorf("Search returned %d results, please revise so only one is returned", len(describeResp.LoadBalancers))
|
||||
}
|
||||
d.SetId(*describeResp.LoadBalancers[0].LoadBalancerArn)
|
||||
|
||||
return flattenAwsAlbResource(d, meta, describeResp.LoadBalancers[0])
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
package aws
|
||||
|
||||
import "github.com/hashicorp/terraform/helper/schema"
|
||||
|
||||
func dataSourceAwsAlbListener() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsAlbListenerRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"arn": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
|
||||
"load_balancer_arn": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"port": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"protocol": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"ssl_policy": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"certificate_arn": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"default_action": {
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"target_group_arn": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"type": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsAlbListenerRead(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId(d.Get("arn").(string))
|
||||
return resourceAwsAlbListenerRead(d, meta)
|
||||
}
|
|
@ -0,0 +1,318 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAWSALBListener_basic(t *testing.T) {
|
||||
albName := fmt.Sprintf("testlistener-basic-%s", acctest.RandStringFromCharSet(13, acctest.CharSetAlphaNum))
|
||||
targetGroupName := fmt.Sprintf("testtargetgroup-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccDataSourceAWSALBListenerConfigBasic(albName, targetGroupName),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "load_balancer_arn"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "arn"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "protocol", "HTTP"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "port", "80"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "default_action.#", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "default_action.0.type", "forward"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "default_action.0.target_group_arn"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccDataSourceAWSALBListener_https(t *testing.T) {
|
||||
albName := fmt.Sprintf("testlistener-https-%s", acctest.RandStringFromCharSet(13, acctest.CharSetAlphaNum))
|
||||
targetGroupName := fmt.Sprintf("testtargetgroup-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccDataSourceAWSALBListenerConfigHTTPS(albName, targetGroupName),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "load_balancer_arn"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "arn"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "protocol", "HTTPS"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "port", "443"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "default_action.#", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "default_action.0.type", "forward"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "default_action.0.target_group_arn"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb_listener.front_end", "certificate_arn"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb_listener.front_end", "ssl_policy", "ELBSecurityPolicy-2015-05"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAWSALBListenerConfigBasic(albName, targetGroupName string) string {
|
||||
return fmt.Sprintf(`resource "aws_alb_listener" "front_end" {
|
||||
load_balancer_arn = "${aws_alb.alb_test.id}"
|
||||
protocol = "HTTP"
|
||||
port = "80"
|
||||
|
||||
default_action {
|
||||
target_group_arn = "${aws_alb_target_group.test.id}"
|
||||
type = "forward"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_alb" "alb_test" {
|
||||
name = "%s"
|
||||
internal = false
|
||||
security_groups = ["${aws_security_group.alb_test.id}"]
|
||||
subnets = ["${aws_subnet.alb_test.*.id}"]
|
||||
|
||||
idle_timeout = 30
|
||||
enable_deletion_protection = false
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_alb_target_group" "test" {
|
||||
name = "%s"
|
||||
port = 8080
|
||||
protocol = "HTTP"
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
|
||||
health_check {
|
||||
path = "/health"
|
||||
interval = 60
|
||||
port = 8081
|
||||
protocol = "HTTP"
|
||||
timeout = 3
|
||||
healthy_threshold = 3
|
||||
unhealthy_threshold = 3
|
||||
matcher = "200-299"
|
||||
}
|
||||
}
|
||||
|
||||
variable "subnets" {
|
||||
default = ["10.0.1.0/24", "10.0.2.0/24"]
|
||||
type = "list"
|
||||
}
|
||||
|
||||
data "aws_availability_zones" "available" {}
|
||||
|
||||
resource "aws_vpc" "alb_test" {
|
||||
cidr_block = "10.0.0.0/16"
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_subnet" "alb_test" {
|
||||
count = 2
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
cidr_block = "${element(var.subnets, count.index)}"
|
||||
map_public_ip_on_launch = true
|
||||
availability_zone = "${element(data.aws_availability_zones.available.names, count.index)}"
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_security_group" "alb_test" {
|
||||
name = "allow_all_alb_test"
|
||||
description = "Used for ALB Testing"
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
|
||||
ingress {
|
||||
from_port = 0
|
||||
to_port = 0
|
||||
protocol = "-1"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 0
|
||||
to_port = 0
|
||||
protocol = "-1"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
data "aws_alb_listener" "front_end" {
|
||||
arn = "${aws_alb_listener.front_end.arn}"
|
||||
}`, albName, targetGroupName)
|
||||
}
|
||||
|
||||
func testAccDataSourceAWSALBListenerConfigHTTPS(albName, targetGroupName string) string {
|
||||
return fmt.Sprintf(`resource "aws_alb_listener" "front_end" {
|
||||
load_balancer_arn = "${aws_alb.alb_test.id}"
|
||||
protocol = "HTTPS"
|
||||
port = "443"
|
||||
ssl_policy = "ELBSecurityPolicy-2015-05"
|
||||
certificate_arn = "${aws_iam_server_certificate.test_cert.arn}"
|
||||
|
||||
default_action {
|
||||
target_group_arn = "${aws_alb_target_group.test.id}"
|
||||
type = "forward"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_alb" "alb_test" {
|
||||
name = "%s"
|
||||
internal = false
|
||||
security_groups = ["${aws_security_group.alb_test.id}"]
|
||||
subnets = ["${aws_subnet.alb_test.*.id}"]
|
||||
|
||||
idle_timeout = 30
|
||||
enable_deletion_protection = false
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_alb_target_group" "test" {
|
||||
name = "%s"
|
||||
port = 8080
|
||||
protocol = "HTTP"
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
|
||||
health_check {
|
||||
path = "/health"
|
||||
interval = 60
|
||||
port = 8081
|
||||
protocol = "HTTP"
|
||||
timeout = 3
|
||||
healthy_threshold = 3
|
||||
unhealthy_threshold = 3
|
||||
matcher = "200-299"
|
||||
}
|
||||
}
|
||||
|
||||
variable "subnets" {
|
||||
default = ["10.0.1.0/24", "10.0.2.0/24"]
|
||||
type = "list"
|
||||
}
|
||||
|
||||
data "aws_availability_zones" "available" {}
|
||||
|
||||
resource "aws_vpc" "alb_test" {
|
||||
cidr_block = "10.0.0.0/16"
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_subnet" "alb_test" {
|
||||
count = 2
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
cidr_block = "${element(var.subnets, count.index)}"
|
||||
map_public_ip_on_launch = true
|
||||
availability_zone = "${element(data.aws_availability_zones.available.names, count.index)}"
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_security_group" "alb_test" {
|
||||
name = "allow_all_alb_test"
|
||||
description = "Used for ALB Testing"
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
|
||||
ingress {
|
||||
from_port = 0
|
||||
to_port = 0
|
||||
protocol = "-1"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 0
|
||||
to_port = 0
|
||||
protocol = "-1"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_iam_server_certificate" "test_cert" {
|
||||
name = "terraform-test-cert-%d"
|
||||
certificate_body = <<EOF
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDBjCCAe4CCQCGWwBmOiHQdTANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB
|
||||
VTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0
|
||||
cyBQdHkgTHRkMB4XDTE2MDYyMTE2MzM0MVoXDTE3MDYyMTE2MzM0MVowRTELMAkG
|
||||
A1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0
|
||||
IFdpZGdpdHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
|
||||
AL+LFlsCJG5txZp4yuu+lQnuUrgBXRG+irQqcTXlV91Bp5hpmRIyhnGCtWxxDBUL
|
||||
xrh4WN3VV/0jDzKT976oLgOy3hj56Cdqf+JlZ1qgMN5bHB3mm3aVWnrnsLbBsfwZ
|
||||
SEbk3Kht/cE1nK2toNVW+rznS3m+eoV3Zn/DUNwGlZr42hGNs6ETn2jURY78ETqR
|
||||
mW47xvjf86eIo7vULHJaY6xyarPqkL8DZazOmvY06hUGvGwGBny7gugfXqDG+I8n
|
||||
cPBsGJGSAmHmVV8o0RCB9UjY+TvSMQRpEDoVlvyrGuglsD8to/4+7UcsuDGlRYN6
|
||||
jmIOC37mOi/jwRfWL1YUa4MCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAPDxTH0oQ
|
||||
JjKXoJgkmQxurB81RfnK/NrswJVzWbOv6ejcbhwh+/ZgJTMc15BrYcxU6vUW1V/i
|
||||
Z7APU0qJ0icECACML+a2fRI7YdLCTiPIOmY66HY8MZHAn3dGjU5TeiUflC0n0zkP
|
||||
mxKJe43kcYLNDItbfvUDo/GoxTXrC3EFVZyU0RhFzoVJdODlTHXMVFCzcbQEBrBJ
|
||||
xKdShCEc8nFMneZcGFeEU488ntZoWzzms8/QpYrKa5S0Sd7umEU2Kwu4HTkvUFg/
|
||||
CqDUFjhydXxYRsxXBBrEiLOE5BdtJR1sH/QHxIJe23C9iHI2nS1NbLziNEApLwC4
|
||||
GnSud83VUo9G9w==
|
||||
-----END CERTIFICATE-----
|
||||
EOF
|
||||
|
||||
private_key = <<EOF
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEowIBAAKCAQEAv4sWWwIkbm3FmnjK676VCe5SuAFdEb6KtCpxNeVX3UGnmGmZ
|
||||
EjKGcYK1bHEMFQvGuHhY3dVX/SMPMpP3vqguA7LeGPnoJ2p/4mVnWqAw3lscHeab
|
||||
dpVaeuewtsGx/BlIRuTcqG39wTWcra2g1Vb6vOdLeb56hXdmf8NQ3AaVmvjaEY2z
|
||||
oROfaNRFjvwROpGZbjvG+N/zp4iju9QsclpjrHJqs+qQvwNlrM6a9jTqFQa8bAYG
|
||||
fLuC6B9eoMb4jydw8GwYkZICYeZVXyjREIH1SNj5O9IxBGkQOhWW/Ksa6CWwPy2j
|
||||
/j7tRyy4MaVFg3qOYg4LfuY6L+PBF9YvVhRrgwIDAQABAoIBAFqJ4h1Om+3e0WK8
|
||||
6h4YzdYN4ue7LUTv7hxPW4gASlH5cMDoWURywX3yLNN/dBiWom4b5NWmvJqY8dwU
|
||||
eSyTznxNFhJ0PjozaxOWnw4FXlQceOPhV2bsHgKudadNU1Y4lSN9lpe+tg2Xy+GE
|
||||
ituM66RTKCf502w3DioiJpx6OEkxuhrnsQAWNcGB0MnTukm2f+629V+04R5MT5V1
|
||||
nY+5Phx2BpHgYzWBKh6Px1puu7xFv5SMQda1ndlPIKb4cNp0yYn+1lHNjbOE7QL/
|
||||
oEpWgrauS5Zk/APK33v/p3wVYHrKocIFHlPiCW0uIJJLsOZDY8pQXpTlc+/xGLLy
|
||||
WBu4boECgYEA6xO+1UNh6ndJ3xGuNippH+ucTi/uq1+0tG1bd63v+75tn5l4LyY2
|
||||
CWHRaWVlVn+WnDslkQTJzFD68X+9M7Cc4oP6WnhTyPamG7HlGv5JxfFHTC9GOKmz
|
||||
sSc624BDmqYJ7Xzyhe5kc3iHzqG/L72ZF1aijZdrodQMSY1634UX6aECgYEA0Jdr
|
||||
cBPSN+mgmEY6ogN5h7sO5uNV3TQQtW2IslfWZn6JhSRF4Rf7IReng48CMy9ZhFBy
|
||||
Q7H2I1pDGjEC9gQHhgVfm+FyMSVqXfCHEW/97pvvu9ougHA0MhPep1twzTGrqg+K
|
||||
f3PLW8hVkGyCrTfWgbDlPsHgsocA/wTaQOheaqMCgYBat5z+WemQfQZh8kXDm2xE
|
||||
KD2Cota9BcsLkeQpdFNXWC6f167cqydRSZFx1fJchhJOKjkeFLX3hgzBY6VVLEPu
|
||||
2jWj8imLNTv3Fhiu6RD5NVppWRkFRuAUbmo1SPNN2+Oa5YwGCXB0a0Alip/oQYex
|
||||
zPogIB4mLlmrjNCtL4SB4QKBgCEHKMrZSJrz0irqS9RlanPUaZqjenAJE3A2xMNA
|
||||
Z0FZXdsIEEyA6JGn1i1dkoKaR7lMp5sSbZ/RZfiatBZSMwLEjQv4mYUwoHP5Ztma
|
||||
+wEyDbaX6G8L1Sfsv3+OWgETkVPfHBXsNtH0mZ/BnrtgsQVeBh52wmZiPAUlNo26
|
||||
fWCzAoGBAJOjqovLelLWzyQGqPFx/MwuI56UFXd1CmFlCIvF2WxCFmk3tlExoCN1
|
||||
HqSpt92vsgYgV7+lAb4U7Uy/v012gwiU1LK+vyAE9geo3pTjG73BNzG4H547xtbY
|
||||
dg+Sd4Wjm89UQoUUoiIcstY7FPbqfBtYKfh4RYHAHV2BwDFqzZCM
|
||||
-----END RSA PRIVATE KEY-----
|
||||
EOF
|
||||
}
|
||||
|
||||
data "aws_alb_listener" "front_end" {
|
||||
arn = "${aws_alb_listener.front_end.arn}"
|
||||
}`, albName, targetGroupName, rand.New(rand.NewSource(time.Now().UnixNano())).Int())
|
||||
}
|
|
@ -0,0 +1,124 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAWSALB_basic(t *testing.T) {
|
||||
albName := fmt.Sprintf("testaccawsalb-basic-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccDataSourceAWSALBConfigBasic(albName),
|
||||
Check: resource.ComposeAggregateTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "name", albName),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "internal", "false"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "subnets.#", "2"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "security_groups.#", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "tags.%", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "tags.TestName", "TestAccAWSALB_basic"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "enable_deletion_protection", "false"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_arn", "idle_timeout", "30"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_arn", "vpc_id"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_arn", "zone_id"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_arn", "dns_name"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_arn", "arn"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "name", albName),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "internal", "false"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "subnets.#", "2"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "security_groups.#", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "tags.%", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "tags.TestName", "TestAccAWSALB_basic"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "enable_deletion_protection", "false"),
|
||||
resource.TestCheckResourceAttr("data.aws_alb.alb_test_with_name", "idle_timeout", "30"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_name", "vpc_id"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_name", "zone_id"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_name", "dns_name"),
|
||||
resource.TestCheckResourceAttrSet("data.aws_alb.alb_test_with_name", "arn"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAWSALBConfigBasic(albName string) string {
|
||||
return fmt.Sprintf(`resource "aws_alb" "alb_test" {
|
||||
name = "%s"
|
||||
internal = false
|
||||
security_groups = ["${aws_security_group.alb_test.id}"]
|
||||
subnets = ["${aws_subnet.alb_test.*.id}"]
|
||||
|
||||
idle_timeout = 30
|
||||
enable_deletion_protection = false
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
variable "subnets" {
|
||||
default = ["10.0.1.0/24", "10.0.2.0/24"]
|
||||
type = "list"
|
||||
}
|
||||
|
||||
data "aws_availability_zones" "available" {}
|
||||
|
||||
resource "aws_vpc" "alb_test" {
|
||||
cidr_block = "10.0.0.0/16"
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_subnet" "alb_test" {
|
||||
count = 2
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
cidr_block = "${element(var.subnets, count.index)}"
|
||||
map_public_ip_on_launch = true
|
||||
availability_zone = "${element(data.aws_availability_zones.available.names, count.index)}"
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
resource "aws_security_group" "alb_test" {
|
||||
name = "allow_all_alb_test"
|
||||
description = "Used for ALB Testing"
|
||||
vpc_id = "${aws_vpc.alb_test.id}"
|
||||
|
||||
ingress {
|
||||
from_port = 0
|
||||
to_port = 0
|
||||
protocol = "-1"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 0
|
||||
to_port = 0
|
||||
protocol = "-1"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
tags {
|
||||
TestName = "TestAccAWSALB_basic"
|
||||
}
|
||||
}
|
||||
|
||||
data "aws_alb" "alb_test_with_arn" {
|
||||
arn = "${aws_alb.alb_test.arn}"
|
||||
}
|
||||
|
||||
data "aws_alb" "alb_test_with_name" {
|
||||
name = "${aws_alb.alb_test.name}"
|
||||
}`, albName)
|
||||
}
|
|
@ -4,10 +4,10 @@ import (
|
|||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"regexp"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/hashcode"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
|
@ -18,184 +18,156 @@ func dataSourceAwsAmi() *schema.Resource {
|
|||
Read: dataSourceAwsAmiRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"executable_users": &schema.Schema{
|
||||
"filter": dataSourceFiltersSchema(),
|
||||
"executable_users": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"filter": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
|
||||
"values": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Required: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
},
|
||||
"name_regex": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
ValidateFunc: validateNameRegex,
|
||||
},
|
||||
"most_recent": &schema.Schema{
|
||||
"most_recent": {
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
Default: false,
|
||||
ForceNew: true,
|
||||
},
|
||||
"owners": &schema.Schema{
|
||||
"owners": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
// Computed values.
|
||||
"architecture": &schema.Schema{
|
||||
"architecture": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"creation_date": &schema.Schema{
|
||||
"creation_date": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"description": &schema.Schema{
|
||||
"description": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"hypervisor": &schema.Schema{
|
||||
"hypervisor": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"image_id": &schema.Schema{
|
||||
"image_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"image_location": &schema.Schema{
|
||||
"image_location": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"image_owner_alias": &schema.Schema{
|
||||
"image_owner_alias": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"image_type": &schema.Schema{
|
||||
"image_type": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"kernel_id": &schema.Schema{
|
||||
"kernel_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"name": &schema.Schema{
|
||||
"name": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"owner_id": &schema.Schema{
|
||||
"owner_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"platform": &schema.Schema{
|
||||
"platform": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"public": &schema.Schema{
|
||||
"public": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
"ramdisk_id": &schema.Schema{
|
||||
"ramdisk_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"root_device_name": &schema.Schema{
|
||||
"root_device_name": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"root_device_type": &schema.Schema{
|
||||
"root_device_type": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"sriov_net_support": &schema.Schema{
|
||||
"sriov_net_support": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"state": &schema.Schema{
|
||||
"state": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"virtualization_type": &schema.Schema{
|
||||
"virtualization_type": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
// Complex computed values
|
||||
"block_device_mappings": &schema.Schema{
|
||||
"block_device_mappings": {
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Set: amiBlockDeviceMappingHash,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"device_name": &schema.Schema{
|
||||
"device_name": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"no_device": &schema.Schema{
|
||||
"no_device": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"virtual_name": &schema.Schema{
|
||||
"virtual_name": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"ebs": &schema.Schema{
|
||||
"ebs": {
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"product_codes": &schema.Schema{
|
||||
"product_codes": {
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Set: amiProductCodesHash,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"product_code_id": &schema.Schema{
|
||||
"product_code_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"product_code_type": &schema.Schema{
|
||||
"product_code_type": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"state_reason": &schema.Schema{
|
||||
"state_reason": {
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
},
|
||||
"tags": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Set: amiTagsHash,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"key": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"value": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"tags": dataSourceTagsSchema(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -206,10 +178,11 @@ func dataSourceAwsAmiRead(d *schema.ResourceData, meta interface{}) error {
|
|||
|
||||
executableUsers, executableUsersOk := d.GetOk("executable_users")
|
||||
filters, filtersOk := d.GetOk("filter")
|
||||
nameRegex, nameRegexOk := d.GetOk("name_regex")
|
||||
owners, ownersOk := d.GetOk("owners")
|
||||
|
||||
if executableUsersOk == false && filtersOk == false && ownersOk == false {
|
||||
return fmt.Errorf("One of executable_users, filters, or owners must be assigned")
|
||||
if executableUsersOk == false && filtersOk == false && nameRegexOk == false && ownersOk == false {
|
||||
return fmt.Errorf("One of executable_users, filters, name_regex, or owners must be assigned")
|
||||
}
|
||||
|
||||
params := &ec2.DescribeImagesInput{}
|
||||
|
@ -217,7 +190,7 @@ func dataSourceAwsAmiRead(d *schema.ResourceData, meta interface{}) error {
|
|||
params.ExecutableUsers = expandStringList(executableUsers.([]interface{}))
|
||||
}
|
||||
if filtersOk {
|
||||
params.Filters = buildAmiFilters(filters.(*schema.Set))
|
||||
params.Filters = buildAwsDataSourceFilters(filters.(*schema.Set))
|
||||
}
|
||||
if ownersOk {
|
||||
params.Owners = expandStringList(owners.([]interface{}))
|
||||
|
@ -227,39 +200,49 @@ func dataSourceAwsAmiRead(d *schema.ResourceData, meta interface{}) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var image *ec2.Image
|
||||
if len(resp.Images) < 1 {
|
||||
return fmt.Errorf("Your query returned no results. Please change your filters and try again.")
|
||||
} else if len(resp.Images) > 1 {
|
||||
if (d.Get("most_recent").(bool)) == true {
|
||||
log.Printf("[DEBUG] aws_ami - multiple results found and most_recent is set")
|
||||
image = mostRecentAmi(resp.Images)
|
||||
} else {
|
||||
log.Printf("[DEBUG] aws_ami - multiple results found and most_recent not set")
|
||||
return fmt.Errorf("Your query returned more than one result. Please try a more specific search, or set most_recent to true.")
|
||||
|
||||
var filteredImages []*ec2.Image
|
||||
if nameRegexOk {
|
||||
r := regexp.MustCompile(nameRegex.(string))
|
||||
for _, image := range resp.Images {
|
||||
// Check for a very rare case where the response would include no
|
||||
// image name. No name means nothing to attempt a match against,
|
||||
// therefore we are skipping such image.
|
||||
if image.Name == nil || *image.Name == "" {
|
||||
log.Printf("[WARN] Unable to find AMI name to match against "+
|
||||
"for image ID %q owned by %q, nothing to do.",
|
||||
*image.ImageId, *image.OwnerId)
|
||||
continue
|
||||
}
|
||||
if r.MatchString(*image.Name) {
|
||||
filteredImages = append(filteredImages, image)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Printf("[DEBUG] aws_ami - Single AMI found: %s", *resp.Images[0].ImageId)
|
||||
image = resp.Images[0]
|
||||
filteredImages = resp.Images[:]
|
||||
}
|
||||
return amiDescriptionAttributes(d, image)
|
||||
}
|
||||
|
||||
// Build a slice of AMI filter options from the filters provided.
|
||||
func buildAmiFilters(set *schema.Set) []*ec2.Filter {
|
||||
var filters []*ec2.Filter
|
||||
for _, v := range set.List() {
|
||||
m := v.(map[string]interface{})
|
||||
var filterValues []*string
|
||||
for _, e := range m["values"].([]interface{}) {
|
||||
filterValues = append(filterValues, aws.String(e.(string)))
|
||||
}
|
||||
filters = append(filters, &ec2.Filter{
|
||||
Name: aws.String(m["name"].(string)),
|
||||
Values: filterValues,
|
||||
})
|
||||
var image *ec2.Image
|
||||
if len(filteredImages) < 1 {
|
||||
return fmt.Errorf("Your query returned no results. Please change your search criteria and try again.")
|
||||
}
|
||||
return filters
|
||||
|
||||
if len(filteredImages) > 1 {
|
||||
recent := d.Get("most_recent").(bool)
|
||||
log.Printf("[DEBUG] aws_ami - multiple results found and `most_recent` is set to: %t", recent)
|
||||
if recent {
|
||||
image = mostRecentAmi(filteredImages)
|
||||
} else {
|
||||
return fmt.Errorf("Your query returned more than one result. Please try a more " +
|
||||
"specific search criteria, or set `most_recent` attribute to true.")
|
||||
}
|
||||
} else {
|
||||
// Query returned single result.
|
||||
image = filteredImages[0]
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] aws_ami - Single AMI found: %s", *image.ImageId)
|
||||
return amiDescriptionAttributes(d, image)
|
||||
}
|
||||
|
||||
type imageSort []*ec2.Image
|
||||
|
@ -283,39 +266,39 @@ func mostRecentAmi(images []*ec2.Image) *ec2.Image {
|
|||
func amiDescriptionAttributes(d *schema.ResourceData, image *ec2.Image) error {
|
||||
// Simple attributes first
|
||||
d.SetId(*image.ImageId)
|
||||
d.Set("architecture", *image.Architecture)
|
||||
d.Set("creation_date", *image.CreationDate)
|
||||
d.Set("architecture", image.Architecture)
|
||||
d.Set("creation_date", image.CreationDate)
|
||||
if image.Description != nil {
|
||||
d.Set("description", *image.Description)
|
||||
d.Set("description", image.Description)
|
||||
}
|
||||
d.Set("hypervisor", *image.Hypervisor)
|
||||
d.Set("image_id", *image.ImageId)
|
||||
d.Set("image_location", *image.ImageLocation)
|
||||
d.Set("hypervisor", image.Hypervisor)
|
||||
d.Set("image_id", image.ImageId)
|
||||
d.Set("image_location", image.ImageLocation)
|
||||
if image.ImageOwnerAlias != nil {
|
||||
d.Set("image_owner_alias", *image.ImageOwnerAlias)
|
||||
d.Set("image_owner_alias", image.ImageOwnerAlias)
|
||||
}
|
||||
d.Set("image_type", *image.ImageType)
|
||||
d.Set("image_type", image.ImageType)
|
||||
if image.KernelId != nil {
|
||||
d.Set("kernel_id", *image.KernelId)
|
||||
d.Set("kernel_id", image.KernelId)
|
||||
}
|
||||
d.Set("name", *image.Name)
|
||||
d.Set("owner_id", *image.OwnerId)
|
||||
d.Set("name", image.Name)
|
||||
d.Set("owner_id", image.OwnerId)
|
||||
if image.Platform != nil {
|
||||
d.Set("platform", *image.Platform)
|
||||
d.Set("platform", image.Platform)
|
||||
}
|
||||
d.Set("public", *image.Public)
|
||||
d.Set("public", image.Public)
|
||||
if image.RamdiskId != nil {
|
||||
d.Set("ramdisk_id", *image.RamdiskId)
|
||||
d.Set("ramdisk_id", image.RamdiskId)
|
||||
}
|
||||
if image.RootDeviceName != nil {
|
||||
d.Set("root_device_name", *image.RootDeviceName)
|
||||
d.Set("root_device_name", image.RootDeviceName)
|
||||
}
|
||||
d.Set("root_device_type", *image.RootDeviceType)
|
||||
d.Set("root_device_type", image.RootDeviceType)
|
||||
if image.SriovNetSupport != nil {
|
||||
d.Set("sriov_net_support", *image.SriovNetSupport)
|
||||
d.Set("sriov_net_support", image.SriovNetSupport)
|
||||
}
|
||||
d.Set("state", *image.State)
|
||||
d.Set("virtualization_type", *image.VirtualizationType)
|
||||
d.Set("state", image.State)
|
||||
d.Set("virtualization_type", image.VirtualizationType)
|
||||
// Complex types get their own functions
|
||||
if err := d.Set("block_device_mappings", amiBlockDeviceMappings(image.BlockDeviceMappings)); err != nil {
|
||||
return err
|
||||
|
@ -326,7 +309,7 @@ func amiDescriptionAttributes(d *schema.ResourceData, image *ec2.Image) error {
|
|||
if err := d.Set("state_reason", amiStateReason(image.StateReason)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.Set("tags", amiTags(image.Tags)); err != nil {
|
||||
if err := d.Set("tags", dataSourceTags(image.Tags)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
|
@ -398,21 +381,6 @@ func amiStateReason(m *ec2.StateReason) map[string]interface{} {
|
|||
return s
|
||||
}
|
||||
|
||||
// Returns a set of tags.
|
||||
func amiTags(m []*ec2.Tag) *schema.Set {
|
||||
s := &schema.Set{
|
||||
F: amiTagsHash,
|
||||
}
|
||||
for _, v := range m {
|
||||
tag := map[string]interface{}{
|
||||
"key": *v.Key,
|
||||
"value": *v.Value,
|
||||
}
|
||||
s.Add(tag)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// Generates a hash for the set hash function used by the block_device_mappings
|
||||
// attribute.
|
||||
func amiBlockDeviceMappingHash(v interface{}) int {
|
||||
|
@ -453,13 +421,13 @@ func amiProductCodesHash(v interface{}) int {
|
|||
return hashcode.String(buf.String())
|
||||
}
|
||||
|
||||
// Generates a hash for the set hash function used by the tags
|
||||
// attribute.
|
||||
func amiTagsHash(v interface{}) int {
|
||||
var buf bytes.Buffer
|
||||
m := v.(map[string]interface{})
|
||||
// All keys added in alphabetical order.
|
||||
buf.WriteString(fmt.Sprintf("%s-", m["key"].(string)))
|
||||
buf.WriteString(fmt.Sprintf("%s-", m["value"].(string)))
|
||||
return hashcode.String(buf.String())
|
||||
func validateNameRegex(v interface{}, k string) (ws []string, errors []error) {
|
||||
value := v.(string)
|
||||
|
||||
if _, err := regexp.Compile(value); err != nil {
|
||||
errors = append(errors, fmt.Errorf(
|
||||
"%q contains an invalid regular expression: %s",
|
||||
k, err))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
|
|
@ -45,7 +45,7 @@ func TestAccAWSAmiDataSource_natInstance(t *testing.T) {
|
|||
resource.TestCheckResourceAttr("data.aws_ami.nat_ami", "state", "available"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.nat_ami", "state_reason.code", "UNSET"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.nat_ami", "state_reason.message", "UNSET"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.nat_ami", "tags.%", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.nat_ami", "tags.#", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.nat_ami", "virtualization_type", "hvm"),
|
||||
),
|
||||
},
|
||||
|
@ -82,7 +82,7 @@ func TestAccAWSAmiDataSource_windowsInstance(t *testing.T) {
|
|||
resource.TestCheckResourceAttr("data.aws_ami.windows_ami", "state", "available"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.windows_ami", "state_reason.code", "UNSET"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.windows_ami", "state_reason.message", "UNSET"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.windows_ami", "tags.%", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.windows_ami", "tags.#", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.windows_ami", "virtualization_type", "hvm"),
|
||||
),
|
||||
},
|
||||
|
@ -116,7 +116,7 @@ func TestAccAWSAmiDataSource_instanceStore(t *testing.T) {
|
|||
resource.TestCheckResourceAttr("data.aws_ami.instance_store_ami", "state", "available"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.instance_store_ami", "state_reason.code", "UNSET"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.instance_store_ami", "state_reason.message", "UNSET"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.instance_store_ami", "tags.%", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.instance_store_ami", "tags.#", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_ami.instance_store_ami", "virtualization_type", "hvm"),
|
||||
),
|
||||
},
|
||||
|
@ -139,6 +139,73 @@ func TestAccAWSAmiDataSource_owners(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestAccAWSAmiDataSource_localNameFilter(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsAmiDataSourceNameRegexConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsAmiDataSourceID("data.aws_ami.name_regex_filtered_ami"),
|
||||
resource.TestMatchResourceAttr("data.aws_ami.name_regex_filtered_ami", "image_id", regexp.MustCompile("^ami-")),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceValidateNameRegex(t *testing.T) {
|
||||
type testCases struct {
|
||||
Value string
|
||||
ErrCount int
|
||||
}
|
||||
|
||||
invalidCases := []testCases{
|
||||
{
|
||||
Value: `\`,
|
||||
ErrCount: 1,
|
||||
},
|
||||
{
|
||||
Value: `**`,
|
||||
ErrCount: 1,
|
||||
},
|
||||
{
|
||||
Value: `(.+`,
|
||||
ErrCount: 1,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range invalidCases {
|
||||
_, errors := validateNameRegex(tc.Value, "name_regex")
|
||||
if len(errors) != tc.ErrCount {
|
||||
t.Fatalf("Expected %q to trigger a validation error.", tc.Value)
|
||||
}
|
||||
}
|
||||
|
||||
validCases := []testCases{
|
||||
{
|
||||
Value: `\/`,
|
||||
ErrCount: 0,
|
||||
},
|
||||
{
|
||||
Value: `.*`,
|
||||
ErrCount: 0,
|
||||
},
|
||||
{
|
||||
Value: `\b(?:\d{1,3}\.){3}\d{1,3}\b`,
|
||||
ErrCount: 0,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range validCases {
|
||||
_, errors := validateNameRegex(tc.Value, "name_regex")
|
||||
if len(errors) != tc.ErrCount {
|
||||
t.Fatalf("Expected %q not to trigger a validation error.", tc.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testAccCheckAwsAmiDataSourceDestroy(s *terraform.State) error {
|
||||
return nil
|
||||
}
|
||||
|
@ -245,3 +312,16 @@ data "aws_ami" "amazon_ami" {
|
|||
owners = ["amazon"]
|
||||
}
|
||||
`
|
||||
|
||||
// Testing name_regex parameter
|
||||
const testAccCheckAwsAmiDataSourceNameRegexConfig = `
|
||||
data "aws_ami" "name_regex_filtered_ami" {
|
||||
most_recent = true
|
||||
owners = ["amazon"]
|
||||
filter {
|
||||
name = "name"
|
||||
values = ["amzn-ami-*"]
|
||||
}
|
||||
name_regex = "^amzn-ami-\\d{3}[5].*-ecs-optimized"
|
||||
}
|
||||
`
|
||||
|
|
|
@ -0,0 +1,89 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsAvailabilityZone() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsAvailabilityZoneRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"region": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"name_suffix": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"state": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsAvailabilityZoneRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
|
||||
req := &ec2.DescribeAvailabilityZonesInput{}
|
||||
|
||||
if name := d.Get("name"); name != "" {
|
||||
req.ZoneNames = []*string{aws.String(name.(string))}
|
||||
}
|
||||
|
||||
req.Filters = buildEC2AttributeFilterList(
|
||||
map[string]string{
|
||||
"state": d.Get("state").(string),
|
||||
},
|
||||
)
|
||||
if len(req.Filters) == 0 {
|
||||
// Don't send an empty filters list; the EC2 API won't accept it.
|
||||
req.Filters = nil
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] DescribeAvailabilityZones %s\n", req)
|
||||
resp, err := conn.DescribeAvailabilityZones(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp == nil || len(resp.AvailabilityZones) == 0 {
|
||||
return fmt.Errorf("no matching AZ found")
|
||||
}
|
||||
if len(resp.AvailabilityZones) > 1 {
|
||||
return fmt.Errorf("multiple AZs matched; use additional constraints to reduce matches to a single AZ")
|
||||
}
|
||||
|
||||
az := resp.AvailabilityZones[0]
|
||||
|
||||
// As a convenience when working with AZs generically, we expose
|
||||
// the AZ suffix alone, without the region name.
|
||||
// This can be used e.g. to create lookup tables by AZ letter that
|
||||
// work regardless of region.
|
||||
nameSuffix := (*az.ZoneName)[len(*az.RegionName):]
|
||||
|
||||
d.SetId(*az.ZoneName)
|
||||
d.Set("id", az.ZoneName)
|
||||
d.Set("name", az.ZoneName)
|
||||
d.Set("name_suffix", nameSuffix)
|
||||
d.Set("region", az.RegionName)
|
||||
d.Set("state", az.State)
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,57 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsAvailabilityZone(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsAvailabilityZoneConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsAvailabilityZoneCheck("data.aws_availability_zone.by_name"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsAvailabilityZoneCheck(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["name"] != "us-west-2a" {
|
||||
return fmt.Errorf("bad name %s", attr["name"])
|
||||
}
|
||||
if attr["name_suffix"] != "a" {
|
||||
return fmt.Errorf("bad name_suffix %s", attr["name_suffix"])
|
||||
}
|
||||
if attr["region"] != "us-west-2" {
|
||||
return fmt.Errorf("bad region %s", attr["region"])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
const testAccDataSourceAwsAvailabilityZoneConfig = `
|
||||
provider "aws" {
|
||||
region = "us-west-2"
|
||||
}
|
||||
|
||||
data "aws_availability_zone" "by_name" {
|
||||
name = "us-west-2a"
|
||||
}
|
||||
`
|
|
@ -0,0 +1,87 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsAvailabilityZones() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsAvailabilityZonesRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"names": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"state": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
ValidateFunc: validateStateType,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsAvailabilityZonesRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
|
||||
log.Printf("[DEBUG] Reading Availability Zones.")
|
||||
d.SetId(time.Now().UTC().String())
|
||||
|
||||
request := &ec2.DescribeAvailabilityZonesInput{}
|
||||
|
||||
if v, ok := d.GetOk("state"); ok {
|
||||
request.Filters = []*ec2.Filter{
|
||||
&ec2.Filter{
|
||||
Name: aws.String("state"),
|
||||
Values: []*string{aws.String(v.(string))},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] Availability Zones request options: %#v", *request)
|
||||
|
||||
resp, err := conn.DescribeAvailabilityZones(request)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error fetching Availability Zones: %s", err)
|
||||
}
|
||||
|
||||
raw := make([]string, len(resp.AvailabilityZones))
|
||||
for i, v := range resp.AvailabilityZones {
|
||||
raw[i] = *v.ZoneName
|
||||
}
|
||||
|
||||
sort.Strings(raw)
|
||||
|
||||
if err := d.Set("names", raw); err != nil {
|
||||
return fmt.Errorf("[WARN] Error setting Availability Zones: %s", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateStateType(v interface{}, k string) (ws []string, errors []error) {
|
||||
value := v.(string)
|
||||
|
||||
validState := map[string]bool{
|
||||
"available": true,
|
||||
"information": true,
|
||||
"impaired": true,
|
||||
"unavailable": true,
|
||||
}
|
||||
|
||||
if !validState[value] {
|
||||
errors = append(errors, fmt.Errorf(
|
||||
"%q contains an invalid Availability Zone state %q. Valid states are: %q, %q, %q and %q.",
|
||||
k, value, "available", "information", "impaired", "unavailable"))
|
||||
}
|
||||
return
|
||||
}
|
|
@ -0,0 +1,142 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccAWSAvailabilityZones_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsAvailabilityZonesConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsAvailabilityZonesMeta("data.aws_availability_zones.availability_zones"),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsAvailabilityZonesStateConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsAvailabilityZoneState("data.aws_availability_zones.state_filter"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestResourceCheckAwsAvailabilityZones_validateStateType(t *testing.T) {
|
||||
_, errors := validateStateType("incorrect", "state")
|
||||
if len(errors) == 0 {
|
||||
t.Fatalf("Expected to trigger a validation error")
|
||||
}
|
||||
|
||||
var testCases = []struct {
|
||||
Value string
|
||||
ErrCount int
|
||||
}{
|
||||
{
|
||||
Value: "available",
|
||||
ErrCount: 0,
|
||||
},
|
||||
{
|
||||
Value: "unavailable",
|
||||
ErrCount: 0,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
_, errors := validateStateType(tc.Value, "state")
|
||||
if len(errors) != tc.ErrCount {
|
||||
t.Fatalf("Expected %q not to trigger a validation error.", tc.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testAccCheckAwsAvailabilityZonesMeta(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[n]
|
||||
if !ok {
|
||||
return fmt.Errorf("Can't find AZ resource: %s", n)
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("AZ resource ID not set.")
|
||||
}
|
||||
|
||||
actual, err := testAccCheckAwsAvailabilityZonesBuildAvailable(rs.Primary.Attributes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
expected := actual
|
||||
sort.Strings(expected)
|
||||
if reflect.DeepEqual(expected, actual) != true {
|
||||
return fmt.Errorf("AZs not sorted - expected %v, got %v", expected, actual)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func testAccCheckAwsAvailabilityZoneState(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[n]
|
||||
if !ok {
|
||||
return fmt.Errorf("Can't find AZ resource: %s", n)
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("AZ resource ID not set.")
|
||||
}
|
||||
|
||||
if _, ok := rs.Primary.Attributes["state"]; !ok {
|
||||
return fmt.Errorf("AZs state filter is missing, should be set.")
|
||||
}
|
||||
|
||||
_, err := testAccCheckAwsAvailabilityZonesBuildAvailable(rs.Primary.Attributes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func testAccCheckAwsAvailabilityZonesBuildAvailable(attrs map[string]string) ([]string, error) {
|
||||
v, ok := attrs["names.#"]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("Available AZ list is missing.")
|
||||
}
|
||||
qty, err := strconv.Atoi(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if qty < 1 {
|
||||
return nil, fmt.Errorf("No AZs found in region, this is probably a bug.")
|
||||
}
|
||||
zones := make([]string, qty)
|
||||
for n := range zones {
|
||||
zone, ok := attrs["names."+strconv.Itoa(n)]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("AZ list corrupt, this is definitely a bug.")
|
||||
}
|
||||
zones[n] = zone
|
||||
}
|
||||
return zones, nil
|
||||
}
|
||||
|
||||
const testAccCheckAwsAvailabilityZonesConfig = `
|
||||
data "aws_availability_zones" "availability_zones" { }
|
||||
`
|
||||
|
||||
const testAccCheckAwsAvailabilityZonesStateConfig = `
|
||||
data "aws_availability_zones" "state_filter" {
|
||||
state = "available"
|
||||
}
|
||||
`
|
|
@ -0,0 +1,29 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
// See http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/billing-getting-started.html#step-2
|
||||
var billingAccountId = "386209384616"
|
||||
|
||||
func dataSourceAwsBillingServiceAccount() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsBillingServiceAccountRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"arn": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsBillingServiceAccountRead(d *schema.ResourceData, meta interface{}) error {
|
||||
d.SetId(billingAccountId)
|
||||
|
||||
d.Set("arn", "arn:aws:iam::"+billingAccountId+":root")
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSBillingServiceAccount_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsBillingServiceAccountConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_billing_service_account.main", "id", "386209384616"),
|
||||
resource.TestCheckResourceAttr("data.aws_billing_service_account.main", "arn", "arn:aws:iam::386209384616:root"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const testAccCheckAwsBillingServiceAccountConfig = `
|
||||
data "aws_billing_service_account" "main" { }
|
||||
`
|
|
@ -0,0 +1,40 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsCallerIdentity() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsCallerIdentityRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"account_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsCallerIdentityRead(d *schema.ResourceData, meta interface{}) error {
|
||||
client := meta.(*AWSClient)
|
||||
|
||||
log.Printf("[DEBUG] Reading Caller Identity.")
|
||||
d.SetId(time.Now().UTC().String())
|
||||
|
||||
if client.accountid == "" {
|
||||
log.Println("[DEBUG] No Account ID available, failing")
|
||||
return fmt.Errorf("No AWS Account ID is available to the provider. Please ensure that\n" +
|
||||
"skip_requesting_account_id is not set on the AWS provider.")
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] Setting AWS Account ID to %s.", client.accountid)
|
||||
d.Set("account_id", meta.(*AWSClient).accountid)
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,48 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccAWSCallerIdentity_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccCheckAwsCallerIdentityConfig_basic,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsCallerIdentityAccountId("data.aws_caller_identity.current"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccCheckAwsCallerIdentityAccountId(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[n]
|
||||
if !ok {
|
||||
return fmt.Errorf("Can't find AccountID resource: %s", n)
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("Account Id resource ID not set.")
|
||||
}
|
||||
|
||||
expected := testAccProvider.Meta().(*AWSClient).accountid
|
||||
if rs.Primary.Attributes["account_id"] != expected {
|
||||
return fmt.Errorf("Incorrect Account ID: expected %q, got %q", expected, rs.Primary.ID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Minimal configuration for the aws_caller_identity data source.
const testAccCheckAwsCallerIdentityConfig_basic = `
data "aws_caller_identity" "current" { }
`
|
|
@ -0,0 +1,117 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/cloudformation"
|
||||
"github.com/hashicorp/errwrap"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsCloudFormationStack() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsCloudFormationStackRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"template_body": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
StateFunc: func(v interface{}) string {
|
||||
json, _ := normalizeJsonString(v)
|
||||
return json
|
||||
},
|
||||
},
|
||||
"capabilities": {
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Set: schema.HashString,
|
||||
},
|
||||
"description": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"disable_rollback": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
"notification_arns": {
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Set: schema.HashString,
|
||||
},
|
||||
"parameters": {
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
},
|
||||
"outputs": {
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
},
|
||||
"timeout_in_minutes": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"tags": {
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsCloudFormationStackRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).cfconn
|
||||
name := d.Get("name").(string)
|
||||
input := cloudformation.DescribeStacksInput{
|
||||
StackName: aws.String(name),
|
||||
}
|
||||
|
||||
out, err := conn.DescribeStacks(&input)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed describing CloudFormation stack (%s): %s", name, err)
|
||||
}
|
||||
if l := len(out.Stacks); l != 1 {
|
||||
return fmt.Errorf("Expected 1 CloudFormation stack (%s), found %d", name, l)
|
||||
}
|
||||
stack := out.Stacks[0]
|
||||
d.SetId(*stack.StackId)
|
||||
|
||||
d.Set("description", stack.Description)
|
||||
d.Set("disable_rollback", stack.DisableRollback)
|
||||
d.Set("timeout_in_minutes", stack.TimeoutInMinutes)
|
||||
|
||||
if len(stack.NotificationARNs) > 0 {
|
||||
d.Set("notification_arns", schema.NewSet(schema.HashString, flattenStringList(stack.NotificationARNs)))
|
||||
}
|
||||
|
||||
d.Set("parameters", flattenAllCloudFormationParameters(stack.Parameters))
|
||||
d.Set("tags", flattenCloudFormationTags(stack.Tags))
|
||||
d.Set("outputs", flattenCloudFormationOutputs(stack.Outputs))
|
||||
|
||||
if len(stack.Capabilities) > 0 {
|
||||
d.Set("capabilities", schema.NewSet(schema.HashString, flattenStringList(stack.Capabilities)))
|
||||
}
|
||||
|
||||
tInput := cloudformation.GetTemplateInput{
|
||||
StackName: aws.String(name),
|
||||
}
|
||||
tOut, err := conn.GetTemplate(&tInput)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
template, err := normalizeJsonString(*tOut.TemplateBody)
|
||||
if err != nil {
|
||||
return errwrap.Wrapf("template body contains an invalid JSON: {{err}}", err)
|
||||
}
|
||||
d.Set("template_body", template)
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,78 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSCloudFormationStack_dataSource_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsCloudFormationStackDataSourceConfig_basic,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "outputs.%", "1"),
|
||||
resource.TestMatchResourceAttr("data.aws_cloudformation_stack.network", "outputs.VPCId",
|
||||
regexp.MustCompile("^vpc-[a-z0-9]{8}$")),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "capabilities.#", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "disable_rollback", "false"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "notification_arns.#", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "parameters.%", "1"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "parameters.CIDR", "10.10.10.0/24"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "timeout_in_minutes", "6"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "tags.%", "2"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "tags.Name", "Form the Cloud"),
|
||||
resource.TestCheckResourceAttr("data.aws_cloudformation_stack.network", "tags.Second", "meh"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Configuration for the data-source test above: a CloudFormation stack
// creating one VPC, read back via the aws_cloudformation_stack data source.
const testAccCheckAwsCloudFormationStackDataSourceConfig_basic = `
resource "aws_cloudformation_stack" "cfs" {
  name = "tf-acc-ds-networking-stack"
  parameters {
    CIDR = "10.10.10.0/24"
  }
  timeout_in_minutes = 6
  template_body = <<STACK
{
  "Parameters": {
    "CIDR": {
      "Type": "String"
    }
  },
  "Resources" : {
    "myvpc": {
      "Type" : "AWS::EC2::VPC",
      "Properties" : {
        "CidrBlock" : { "Ref" : "CIDR" },
        "Tags" : [
          {"Key": "Name", "Value": "Primary_CF_VPC"}
        ]
      }
    }
  },
  "Outputs" : {
    "VPCId" : {
      "Value" : { "Ref" : "myvpc" },
      "Description" : "VPC ID"
    }
  }
}
STACK
  tags {
    Name = "Form the Cloud"
    Second = "meh"
  }
}

data "aws_cloudformation_stack" "network" {
  name = "${aws_cloudformation_stack.cfs.name}"
}
`
|
|
@ -0,0 +1,91 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/hashcode"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceTagsHash(v interface{}) int {
|
||||
var buf bytes.Buffer
|
||||
m := v.(map[string]interface{})
|
||||
buf.WriteString(fmt.Sprintf("%s-", m["key"].(string)))
|
||||
buf.WriteString(fmt.Sprintf("%s-", m["value"].(string)))
|
||||
return hashcode.String(buf.String())
|
||||
}
|
||||
|
||||
func dataSourceTags(m []*ec2.Tag) *schema.Set {
|
||||
s := &schema.Set{
|
||||
F: dataSourceTagsHash,
|
||||
}
|
||||
for _, v := range m {
|
||||
tag := map[string]interface{}{
|
||||
"key": *v.Key,
|
||||
"value": *v.Value,
|
||||
}
|
||||
s.Add(tag)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func buildAwsDataSourceFilters(set *schema.Set) []*ec2.Filter {
|
||||
var filters []*ec2.Filter
|
||||
for _, v := range set.List() {
|
||||
m := v.(map[string]interface{})
|
||||
var filterValues []*string
|
||||
for _, e := range m["values"].([]interface{}) {
|
||||
filterValues = append(filterValues, aws.String(e.(string)))
|
||||
}
|
||||
filters = append(filters, &ec2.Filter{
|
||||
Name: aws.String(m["name"].(string)),
|
||||
Values: filterValues,
|
||||
})
|
||||
}
|
||||
return filters
|
||||
}
|
||||
|
||||
func dataSourceFiltersSchema() *schema.Schema {
|
||||
return &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
|
||||
"values": {
|
||||
Type: schema.TypeList,
|
||||
Required: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceTagsSchema() *schema.Schema {
|
||||
return &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Computed: true,
|
||||
Set: dataSourceTagsHash,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"key": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"value": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
|
@ -0,0 +1,143 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsEbsSnapshot() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsEbsSnapshotRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
//selection criteria
|
||||
"filter": dataSourceFiltersSchema(),
|
||||
|
||||
"owners": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"snapshot_ids": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"restorable_by_user_ids": {
|
||||
Type: schema.TypeList,
|
||||
Optional: true,
|
||||
ForceNew: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
//Computed values returned
|
||||
"snapshot_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"volume_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"state": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"owner_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"owner_alias": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"encrypted": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
"description": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"volume_size": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"kms_key_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"data_encryption_key_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"tags": dataSourceTagsSchema(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsEbsSnapshotRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
|
||||
restorableUsers, restorableUsersOk := d.GetOk("restorable_by_user_ids")
|
||||
filters, filtersOk := d.GetOk("filter")
|
||||
snapshotIds, snapshotIdsOk := d.GetOk("snapshot_ids")
|
||||
owners, ownersOk := d.GetOk("owners")
|
||||
|
||||
if restorableUsers == false && filtersOk == false && snapshotIds == false && ownersOk == false {
|
||||
return fmt.Errorf("One of snapshot_ids, filters, restorable_by_user_ids, or owners must be assigned")
|
||||
}
|
||||
|
||||
params := &ec2.DescribeSnapshotsInput{}
|
||||
if restorableUsersOk {
|
||||
params.RestorableByUserIds = expandStringList(restorableUsers.([]interface{}))
|
||||
}
|
||||
if filtersOk {
|
||||
params.Filters = buildAwsDataSourceFilters(filters.(*schema.Set))
|
||||
}
|
||||
if ownersOk {
|
||||
params.OwnerIds = expandStringList(owners.([]interface{}))
|
||||
}
|
||||
if snapshotIdsOk {
|
||||
params.SnapshotIds = expandStringList(snapshotIds.([]interface{}))
|
||||
}
|
||||
|
||||
resp, err := conn.DescribeSnapshots(params)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(resp.Snapshots) < 1 {
|
||||
return fmt.Errorf("Your query returned no results. Please change your search criteria and try again.")
|
||||
}
|
||||
|
||||
if len(resp.Snapshots) > 1 {
|
||||
return fmt.Errorf("Your query returned more than one result. Please try a more specific search criteria.")
|
||||
}
|
||||
|
||||
//Single Snapshot found so set to state
|
||||
return snapshotDescriptionAttributes(d, resp.Snapshots[0])
|
||||
}
|
||||
|
||||
func snapshotDescriptionAttributes(d *schema.ResourceData, snapshot *ec2.Snapshot) error {
|
||||
d.SetId(*snapshot.SnapshotId)
|
||||
d.Set("snapshot_id", snapshot.SnapshotId)
|
||||
d.Set("volume_id", snapshot.VolumeId)
|
||||
d.Set("data_encryption_key_id", snapshot.DataEncryptionKeyId)
|
||||
d.Set("description", snapshot.Description)
|
||||
d.Set("encrypted", snapshot.Encrypted)
|
||||
d.Set("kms_key_id", snapshot.KmsKeyId)
|
||||
d.Set("volume_size", snapshot.VolumeSize)
|
||||
d.Set("state", snapshot.State)
|
||||
d.Set("owner_id", snapshot.OwnerId)
|
||||
d.Set("owner_alias", snapshot.OwnerAlias)
|
||||
|
||||
if err := d.Set("tags", dataSourceTags(snapshot.Tags)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,97 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccAWSEbsSnapshotDataSource_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccCheckAwsEbsSnapshotDataSourceConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsEbsSnapshotDataSourceID("data.aws_ebs_snapshot.snapshot"),
|
||||
resource.TestCheckResourceAttr("data.aws_ebs_snapshot.snapshot", "volume_size", "40"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccAWSEbsSnapshotDataSource_multipleFilters(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccCheckAwsEbsSnapshotDataSourceConfigWithMultipleFilters,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsEbsSnapshotDataSourceID("data.aws_ebs_snapshot.snapshot"),
|
||||
resource.TestCheckResourceAttr("data.aws_ebs_snapshot.snapshot", "volume_size", "10"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccCheckAwsEbsSnapshotDataSourceID(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[n]
|
||||
if !ok {
|
||||
return fmt.Errorf("Can't find Volume data source: %s", n)
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("Snapshot data source ID not set")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Configuration: a volume, a snapshot of it, and a data-source lookup of
// that snapshot by ID.
const testAccCheckAwsEbsSnapshotDataSourceConfig = `
resource "aws_ebs_volume" "example" {
  availability_zone = "us-west-2a"
  type = "gp2"
  size = 40
  tags {
    Name = "External Volume"
  }
}

resource "aws_ebs_snapshot" "snapshot" {
  volume_id = "${aws_ebs_volume.example.id}"
}

data "aws_ebs_snapshot" "snapshot" {
  snapshot_ids = ["${aws_ebs_snapshot.snapshot.id}"]
}
`
|
||||
|
||||
// Configuration: like the basic config but the lookup additionally
// filters on volume-size.
const testAccCheckAwsEbsSnapshotDataSourceConfigWithMultipleFilters = `
resource "aws_ebs_volume" "external1" {
  availability_zone = "us-west-2a"
  type = "gp2"
  size = 10
  tags {
    Name = "External Volume 1"
  }
}

resource "aws_ebs_snapshot" "snapshot" {
  volume_id = "${aws_ebs_volume.external1.id}"
}

data "aws_ebs_snapshot" "snapshot" {
  snapshot_ids = ["${aws_ebs_snapshot.snapshot.id}"]
  filter {
    name = "volume-size"
    values = ["10"]
  }
}
`
|
|
@ -0,0 +1,136 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsEbsVolume() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsEbsVolumeRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"filter": dataSourceFiltersSchema(),
|
||||
"most_recent": {
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
Default: false,
|
||||
ForceNew: true,
|
||||
},
|
||||
"availability_zone": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"encrypted": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
"iops": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"volume_type": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"size": {
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"snapshot_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"kms_key_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"volume_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"tags": dataSourceTagsSchema(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsEbsVolumeRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
|
||||
filters, filtersOk := d.GetOk("filter")
|
||||
|
||||
params := &ec2.DescribeVolumesInput{}
|
||||
if filtersOk {
|
||||
params.Filters = buildAwsDataSourceFilters(filters.(*schema.Set))
|
||||
}
|
||||
|
||||
resp, err := conn.DescribeVolumes(params)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Printf("Found These Volumes %s", spew.Sdump(resp.Volumes))
|
||||
|
||||
filteredVolumes := resp.Volumes[:]
|
||||
|
||||
var volume *ec2.Volume
|
||||
if len(filteredVolumes) < 1 {
|
||||
return fmt.Errorf("Your query returned no results. Please change your search criteria and try again.")
|
||||
}
|
||||
|
||||
if len(filteredVolumes) > 1 {
|
||||
recent := d.Get("most_recent").(bool)
|
||||
log.Printf("[DEBUG] aws_ebs_volume - multiple results found and `most_recent` is set to: %t", recent)
|
||||
if recent {
|
||||
volume = mostRecentVolume(filteredVolumes)
|
||||
} else {
|
||||
return fmt.Errorf("Your query returned more than one result. Please try a more " +
|
||||
"specific search criteria, or set `most_recent` attribute to true.")
|
||||
}
|
||||
} else {
|
||||
// Query returned single result.
|
||||
volume = filteredVolumes[0]
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] aws_ebs_volume - Single Volume found: %s", *volume.VolumeId)
|
||||
return volumeDescriptionAttributes(d, volume)
|
||||
}
|
||||
|
||||
type volumeSort []*ec2.Volume
|
||||
|
||||
func (a volumeSort) Len() int { return len(a) }
|
||||
func (a volumeSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
func (a volumeSort) Less(i, j int) bool {
|
||||
itime := *a[i].CreateTime
|
||||
jtime := *a[j].CreateTime
|
||||
return itime.Unix() < jtime.Unix()
|
||||
}
|
||||
|
||||
func mostRecentVolume(volumes []*ec2.Volume) *ec2.Volume {
|
||||
sortedVolumes := volumes
|
||||
sort.Sort(volumeSort(sortedVolumes))
|
||||
return sortedVolumes[len(sortedVolumes)-1]
|
||||
}
|
||||
|
||||
func volumeDescriptionAttributes(d *schema.ResourceData, volume *ec2.Volume) error {
|
||||
d.SetId(*volume.VolumeId)
|
||||
d.Set("volume_id", volume.VolumeId)
|
||||
d.Set("availability_zone", volume.AvailabilityZone)
|
||||
d.Set("encrypted", volume.Encrypted)
|
||||
d.Set("iops", volume.Iops)
|
||||
d.Set("kms_key_id", volume.KmsKeyId)
|
||||
d.Set("size", volume.Size)
|
||||
d.Set("snapshot_id", volume.SnapshotId)
|
||||
d.Set("volume_type", volume.VolumeType)
|
||||
|
||||
if err := d.Set("tags", dataSourceTags(volume.Tags)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,106 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccAWSEbsVolumeDataSource_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccCheckAwsEbsVolumeDataSourceConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsEbsVolumeDataSourceID("data.aws_ebs_volume.ebs_volume"),
|
||||
resource.TestCheckResourceAttr("data.aws_ebs_volume.ebs_volume", "size", "40"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func TestAccAWSEbsVolumeDataSource_multipleFilters(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
{
|
||||
Config: testAccCheckAwsEbsVolumeDataSourceConfigWithMultipleFilters,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckAwsEbsVolumeDataSourceID("data.aws_ebs_volume.ebs_volume"),
|
||||
resource.TestCheckResourceAttr("data.aws_ebs_volume.ebs_volume", "size", "10"),
|
||||
resource.TestCheckResourceAttr("data.aws_ebs_volume.ebs_volume", "volume_type", "gp2"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccCheckAwsEbsVolumeDataSourceID(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[n]
|
||||
if !ok {
|
||||
return fmt.Errorf("Can't find Volume data source: %s", n)
|
||||
}
|
||||
|
||||
if rs.Primary.ID == "" {
|
||||
return fmt.Errorf("Volume data source ID not set")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Configuration: a tagged volume and a data-source lookup of it by tag
// and volume-type filters.
const testAccCheckAwsEbsVolumeDataSourceConfig = `
resource "aws_ebs_volume" "example" {
  availability_zone = "us-west-2a"
  type = "gp2"
  size = 40
  tags {
    Name = "External Volume"
  }
}

data "aws_ebs_volume" "ebs_volume" {
  most_recent = true
  filter {
    name = "tag:Name"
    values = ["External Volume"]
  }
  filter {
    name = "volume-type"
    values = ["${aws_ebs_volume.example.type}"]
  }
}
`
|
||||
|
||||
// Configuration: a volume looked up with three filters (tag, size,
// volume-type) at once.
const testAccCheckAwsEbsVolumeDataSourceConfigWithMultipleFilters = `
resource "aws_ebs_volume" "external1" {
  availability_zone = "us-west-2a"
  type = "gp2"
  size = 10
  tags {
    Name = "External Volume 1"
  }
}

data "aws_ebs_volume" "ebs_volume" {
  most_recent = true
  filter {
    name = "tag:Name"
    values = ["External Volume 1"]
  }
  filter {
    name = "size"
    values = ["${aws_ebs_volume.external1.size}"]
  }
  filter {
    name = "volume-type"
    values = ["${aws_ebs_volume.external1.type}"]
  }
}
`
|
|
@ -0,0 +1,107 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ecs"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsEcsContainerDefinition() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsEcsContainerDefinitionRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"task_definition": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
"container_name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
ForceNew: true,
|
||||
},
|
||||
// Computed values.
|
||||
"image": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"image_digest": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"cpu": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"memory": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"memory_reservation": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
"disable_networking": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
"docker_labels": &schema.Schema{
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
Elem: schema.TypeString,
|
||||
},
|
||||
"environment": &schema.Schema{
|
||||
Type: schema.TypeMap,
|
||||
Computed: true,
|
||||
Elem: schema.TypeString,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsEcsContainerDefinitionRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ecsconn
|
||||
|
||||
desc, err := conn.DescribeTaskDefinition(&ecs.DescribeTaskDefinitionInput{
|
||||
TaskDefinition: aws.String(d.Get("task_definition").(string)),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
taskDefinition := *desc.TaskDefinition
|
||||
for _, def := range taskDefinition.ContainerDefinitions {
|
||||
if aws.StringValue(def.Name) != d.Get("container_name").(string) {
|
||||
continue
|
||||
}
|
||||
|
||||
d.SetId(fmt.Sprintf("%s/%s", aws.StringValue(taskDefinition.TaskDefinitionArn), d.Get("container_name").(string)))
|
||||
d.Set("image", aws.StringValue(def.Image))
|
||||
image := aws.StringValue(def.Image)
|
||||
if strings.Contains(image, ":") {
|
||||
d.Set("image_digest", strings.Split(image, ":")[1])
|
||||
}
|
||||
d.Set("cpu", aws.Int64Value(def.Cpu))
|
||||
d.Set("memory", aws.Int64Value(def.Memory))
|
||||
d.Set("memory_reservation", aws.Int64Value(def.MemoryReservation))
|
||||
d.Set("disable_networking", aws.BoolValue(def.DisableNetworking))
|
||||
d.Set("docker_labels", aws.StringValueMap(def.DockerLabels))
|
||||
|
||||
var environment = map[string]string{}
|
||||
for _, keyValuePair := range def.Environment {
|
||||
environment[aws.StringValue(keyValuePair.Name)] = aws.StringValue(keyValuePair.Value)
|
||||
}
|
||||
d.Set("environment", environment)
|
||||
}
|
||||
|
||||
if d.Id() == "" {
|
||||
return fmt.Errorf("container with name %q not found in task definition %q", d.Get("container_name").(string), d.Get("task_definition").(string))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,65 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSEcsDataSource_ecsContainerDefinition(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsEcsContainerDefinitionDataSourceConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_ecs_container_definition.mongo", "image", "mongo:latest"),
|
||||
resource.TestCheckResourceAttr("data.aws_ecs_container_definition.mongo", "image_digest", "latest"),
|
||||
resource.TestCheckResourceAttr("data.aws_ecs_container_definition.mongo", "memory", "128"),
|
||||
resource.TestCheckResourceAttr("data.aws_ecs_container_definition.mongo", "memory_reservation", "64"),
|
||||
resource.TestCheckResourceAttr("data.aws_ecs_container_definition.mongo", "cpu", "128"),
|
||||
resource.TestCheckResourceAttr("data.aws_ecs_container_definition.mongo", "environment.SECRET", "KEY"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Configuration: an ECS cluster, a "mongodb" task definition and
// service, plus a data-source read of the mongodb container definition.
const testAccCheckAwsEcsContainerDefinitionDataSourceConfig = `
resource "aws_ecs_cluster" "default" {
  name = "terraformecstest1"
}

resource "aws_ecs_task_definition" "mongo" {
  family = "mongodb"
  container_definitions = <<DEFINITION
[
  {
    "cpu": 128,
    "environment": [{
      "name": "SECRET",
      "value": "KEY"
    }],
    "essential": true,
    "image": "mongo:latest",
    "memory": 128,
    "memoryReservation": 64,
    "name": "mongodb"
  }
]
DEFINITION
}

resource "aws_ecs_service" "mongo" {
  name = "mongodb"
  cluster = "${aws_ecs_cluster.default.id}"
  task_definition = "${aws_ecs_task_definition.mongo.arn}"
  desired_count = 1
}

data "aws_ecs_container_definition" "mongo" {
  task_definition = "${aws_ecs_task_definition.mongo.id}"
  container_name = "mongodb"
}
`
|
|
@ -0,0 +1,59 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
// See http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html#attach-bucket-policy
|
||||
// elbAccountIdPerRegionMap maps an AWS region to the account ID that ELB
// uses to deliver access logs in that region.
var elbAccountIdPerRegionMap = map[string]string{
	"ap-northeast-1": "582318560864",
	"ap-northeast-2": "600734575887",
	"ap-south-1":     "718504428378",
	"ap-southeast-1": "114774131450",
	"ap-southeast-2": "783225319266",
	"cn-north-1":     "638102146993",
	"eu-central-1":   "054676820928",
	"eu-west-1":      "156460612806",
	"sa-east-1":      "507241528517",
	"us-east-1":      "127311923021",
	"us-east-2":      "033677994240",
	"us-gov-west":    "048591011584",
	"us-west-1":      "027434742980",
	"us-west-2":      "797873946194",
}
|
||||
|
||||
func dataSourceAwsElbServiceAccount() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsElbServiceAccountRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"region": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"arn": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsElbServiceAccountRead(d *schema.ResourceData, meta interface{}) error {
|
||||
region := meta.(*AWSClient).region
|
||||
if v, ok := d.GetOk("region"); ok {
|
||||
region = v.(string)
|
||||
}
|
||||
|
||||
if accid, ok := elbAccountIdPerRegionMap[region]; ok {
|
||||
d.SetId(accid)
|
||||
|
||||
d.Set("arn", "arn:aws:iam::"+accid+":root")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("Unknown region (%q)", region)
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSElbServiceAccount_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsElbServiceAccountConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_elb_service_account.main", "id", "797873946194"),
|
||||
resource.TestCheckResourceAttr("data.aws_elb_service_account.main", "arn", "arn:aws:iam::797873946194:root"),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsElbServiceAccountExplicitRegionConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_elb_service_account.regional", "id", "156460612806"),
|
||||
resource.TestCheckResourceAttr("data.aws_elb_service_account.regional", "arn", "arn:aws:iam::156460612806:root"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Configuration relying on the provider's default region.
const testAccCheckAwsElbServiceAccountConfig = `
data "aws_elb_service_account" "main" { }
`

// Configuration overriding the region explicitly.
const testAccCheckAwsElbServiceAccountExplicitRegionConfig = `
data "aws_elb_service_account" "regional" {
	region = "eu-west-1"
}
`
|
|
@ -24,20 +24,20 @@ func dataSourceAwsIamPolicyDocument() *schema.Resource {
|
|||
Read: dataSourceAwsIamPolicyDocumentRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"id": &schema.Schema{
|
||||
"policy_id": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"statement": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
"statement": {
|
||||
Type: schema.TypeList,
|
||||
Required: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"id": &schema.Schema{
|
||||
"sid": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"effect": &schema.Schema{
|
||||
"effect": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Default: "Allow",
|
||||
|
@ -48,20 +48,20 @@ func dataSourceAwsIamPolicyDocument() *schema.Resource {
|
|||
"not_resources": setOfString,
|
||||
"principals": dataSourceAwsIamPolicyPrincipalSchema(),
|
||||
"not_principals": dataSourceAwsIamPolicyPrincipalSchema(),
|
||||
"condition": &schema.Schema{
|
||||
"condition": {
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"test": &schema.Schema{
|
||||
"test": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"variable": &schema.Schema{
|
||||
"variable": {
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"values": &schema.Schema{
|
||||
"values": {
|
||||
Type: schema.TypeSet,
|
||||
Required: true,
|
||||
Elem: &schema.Schema{
|
||||
|
@ -74,7 +74,7 @@ func dataSourceAwsIamPolicyDocument() *schema.Resource {
|
|||
},
|
||||
},
|
||||
},
|
||||
"json": &schema.Schema{
|
||||
"json": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
@ -87,11 +87,11 @@ func dataSourceAwsIamPolicyDocumentRead(d *schema.ResourceData, meta interface{}
|
|||
Version: "2012-10-17",
|
||||
}
|
||||
|
||||
if policyId, hasPolicyId := d.GetOk("id"); hasPolicyId {
|
||||
if policyId, hasPolicyId := d.GetOk("policy_id"); hasPolicyId {
|
||||
doc.Id = policyId.(string)
|
||||
}
|
||||
|
||||
var cfgStmts = d.Get("statement").(*schema.Set).List()
|
||||
var cfgStmts = d.Get("statement").([]interface{})
|
||||
stmts := make([]*IAMPolicyStatement, len(cfgStmts))
|
||||
doc.Statements = stmts
|
||||
for i, stmtI := range cfgStmts {
|
||||
|
@ -100,6 +100,10 @@ func dataSourceAwsIamPolicyDocumentRead(d *schema.ResourceData, meta interface{}
|
|||
Effect: cfgStmt["effect"].(string),
|
||||
}
|
||||
|
||||
if sid, ok := cfgStmt["sid"]; ok {
|
||||
stmt.Sid = sid.(string)
|
||||
}
|
||||
|
||||
if actions := cfgStmt["actions"].(*schema.Set).List(); len(actions) > 0 {
|
||||
stmt.Actions = iamPolicyDecodeConfigStringList(actions)
|
||||
}
|
||||
|
@ -146,12 +150,19 @@ func dataSourceAwsIamPolicyDocumentRead(d *schema.ResourceData, meta interface{}
|
|||
return nil
|
||||
}
|
||||
|
||||
func dataSourceAwsIamPolicyDocumentReplaceVarsInList(in []string) []string {
|
||||
out := make([]string, len(in))
|
||||
for i, item := range in {
|
||||
out[i] = dataSourceAwsIamPolicyDocumentVarReplacer.Replace(item)
|
||||
func dataSourceAwsIamPolicyDocumentReplaceVarsInList(in interface{}) interface{} {
|
||||
switch v := in.(type) {
|
||||
case string:
|
||||
return dataSourceAwsIamPolicyDocumentVarReplacer.Replace(v)
|
||||
case []string:
|
||||
out := make([]string, len(v))
|
||||
for i, item := range v {
|
||||
out[i] = dataSourceAwsIamPolicyDocumentVarReplacer.Replace(item)
|
||||
}
|
||||
return out
|
||||
default:
|
||||
panic("dataSourceAwsIamPolicyDocumentReplaceVarsInList: input not string nor []string")
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func dataSourceAwsIamPolicyDocumentMakeConditions(in []interface{}) IAMPolicyStatementConditionSet {
|
||||
|
|
|
@ -16,7 +16,7 @@ func TestAccAWSIAMPolicyDocument(t *testing.T) {
|
|||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
{
|
||||
Config: testAccAWSIAMPolicyDocumentConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccCheckStateValue(
|
||||
|
@ -52,7 +52,9 @@ func testAccCheckStateValue(id, name, value string) resource.TestCheckFunc {
|
|||
|
||||
var testAccAWSIAMPolicyDocumentConfig = `
|
||||
data "aws_iam_policy_document" "test" {
|
||||
policy_id = "policy_id"
|
||||
statement {
|
||||
sid = "1"
|
||||
actions = [
|
||||
"s3:ListAllMyBuckets",
|
||||
"s3:GetBucketLocation",
|
||||
|
@ -73,7 +75,6 @@ data "aws_iam_policy_document" "test" {
|
|||
test = "StringLike"
|
||||
variable = "s3:prefix"
|
||||
values = [
|
||||
"",
|
||||
"home/",
|
||||
"home/&{aws:username}/",
|
||||
]
|
||||
|
@ -105,68 +106,86 @@ data "aws_iam_policy_document" "test" {
|
|||
not_resources = ["arn:aws:s3:::*"]
|
||||
}
|
||||
|
||||
# Normalization of wildcard principals
|
||||
statement {
|
||||
effect = "Allow"
|
||||
actions = ["kinesis:*"]
|
||||
principals {
|
||||
type = "AWS"
|
||||
identifiers = ["*"]
|
||||
}
|
||||
}
|
||||
statement {
|
||||
effect = "Allow"
|
||||
actions = ["firehose:*"]
|
||||
principals {
|
||||
type = "*"
|
||||
identifiers = ["*"]
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
`
|
||||
|
||||
var testAccAWSIAMPolicyDocumentExpectedJSON = `{
|
||||
"Version": "2012-10-17",
|
||||
"Id": "policy_id",
|
||||
"Statement": [
|
||||
{
|
||||
"Sid": "1",
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"s3:GetBucketLocation",
|
||||
"s3:ListAllMyBuckets"
|
||||
"s3:ListAllMyBuckets",
|
||||
"s3:GetBucketLocation"
|
||||
],
|
||||
"Resource": [
|
||||
"arn:aws:s3:::*"
|
||||
]
|
||||
"Resource": "arn:aws:s3:::*"
|
||||
},
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"s3:ListBucket"
|
||||
],
|
||||
"Resource": [
|
||||
"arn:aws:s3:::foo"
|
||||
],
|
||||
"Action": "s3:ListBucket",
|
||||
"Resource": "arn:aws:s3:::foo",
|
||||
"NotPrincipal": {
|
||||
"AWS": [
|
||||
"arn:blahblah:example"
|
||||
]
|
||||
"AWS": "arn:blahblah:example"
|
||||
},
|
||||
"Condition": {
|
||||
"StringLike": {
|
||||
"s3:prefix": [
|
||||
"",
|
||||
"home/",
|
||||
"home/${aws:username}/"
|
||||
"home/${aws:username}/",
|
||||
"home/"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"s3:*"
|
||||
],
|
||||
"Action": "s3:*",
|
||||
"Resource": [
|
||||
"arn:aws:s3:::foo/home/${aws:username}/*",
|
||||
"arn:aws:s3:::foo/home/${aws:username}"
|
||||
],
|
||||
"Principal": {
|
||||
"AWS": [
|
||||
"arn:blahblah:example"
|
||||
]
|
||||
"AWS": "arn:blahblah:example"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Deny",
|
||||
"NotAction": [
|
||||
"s3:*"
|
||||
],
|
||||
"NotResource": [
|
||||
"arn:aws:s3:::*"
|
||||
]
|
||||
"NotAction": "s3:*",
|
||||
"NotResource": "arn:aws:s3:::*"
|
||||
},
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Allow",
|
||||
"Action": "kinesis:*",
|
||||
"Principal": "*"
|
||||
},
|
||||
{
|
||||
"Sid": "",
|
||||
"Effect": "Allow",
|
||||
"Action": "firehose:*",
|
||||
"Principal": "*"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
|
|
@ -0,0 +1,151 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/hashicorp/go-cleanhttp"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
// dataSourceAwsIPRangesResult models the top-level structure of the
// published ip-ranges.json document.
type dataSourceAwsIPRangesResult struct {
	CreateDate string
	Prefixes   []dataSourceAwsIPRangesPrefix
	SyncToken  string
}

// dataSourceAwsIPRangesPrefix models one entry of the "prefixes" array.
type dataSourceAwsIPRangesPrefix struct {
	IpPrefix string `json:"ip_prefix"`
	Region   string
	Service  string
}
|
||||
|
||||
func dataSourceAwsIPRanges() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsIPRangesRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"cidr_blocks": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"create_date": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"regions": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
Optional: true,
|
||||
},
|
||||
"services": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Required: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
"sync_token": &schema.Schema{
|
||||
Type: schema.TypeInt,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsIPRangesRead(d *schema.ResourceData, meta interface{}) error {
|
||||
|
||||
conn := cleanhttp.DefaultClient()
|
||||
|
||||
log.Printf("[DEBUG] Reading IP ranges")
|
||||
|
||||
res, err := conn.Get("https://ip-ranges.amazonaws.com/ip-ranges.json")
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error listing IP ranges: %s", err)
|
||||
}
|
||||
|
||||
defer res.Body.Close()
|
||||
|
||||
data, err := ioutil.ReadAll(res.Body)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error reading response body: %s", err)
|
||||
}
|
||||
|
||||
result := new(dataSourceAwsIPRangesResult)
|
||||
|
||||
if err := json.Unmarshal(data, result); err != nil {
|
||||
return fmt.Errorf("Error parsing result: %s", err)
|
||||
}
|
||||
|
||||
if err := d.Set("create_date", result.CreateDate); err != nil {
|
||||
return fmt.Errorf("Error setting create date: %s", err)
|
||||
}
|
||||
|
||||
syncToken, err := strconv.Atoi(result.SyncToken)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error while converting sync token: %s", err)
|
||||
}
|
||||
|
||||
d.SetId(result.SyncToken)
|
||||
|
||||
if err := d.Set("sync_token", syncToken); err != nil {
|
||||
return fmt.Errorf("Error setting sync token: %s", err)
|
||||
}
|
||||
|
||||
get := func(key string) *schema.Set {
|
||||
|
||||
set := d.Get(key).(*schema.Set)
|
||||
|
||||
for _, e := range set.List() {
|
||||
|
||||
s := e.(string)
|
||||
|
||||
set.Remove(s)
|
||||
set.Add(strings.ToLower(s))
|
||||
|
||||
}
|
||||
|
||||
return set
|
||||
|
||||
}
|
||||
|
||||
var (
|
||||
regions = get("regions")
|
||||
services = get("services")
|
||||
noRegionFilter = regions.Len() == 0
|
||||
prefixes []string
|
||||
)
|
||||
|
||||
for _, e := range result.Prefixes {
|
||||
|
||||
var (
|
||||
matchRegion = noRegionFilter || regions.Contains(strings.ToLower(e.Region))
|
||||
matchService = services.Contains(strings.ToLower(e.Service))
|
||||
)
|
||||
|
||||
if matchRegion && matchService {
|
||||
prefixes = append(prefixes, e.IpPrefix)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if len(prefixes) == 0 {
|
||||
return fmt.Errorf(" No IP ranges result from filters")
|
||||
}
|
||||
|
||||
sort.Strings(prefixes)
|
||||
|
||||
if err := d.Set("cidr_blocks", prefixes); err != nil {
|
||||
return fmt.Errorf("Error setting ip ranges: %s", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
}
|
|
@ -0,0 +1,128 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccAWSIPRanges(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSIPRangesConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccAWSIPRanges("data.aws_ip_ranges.some"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccAWSIPRanges(n string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
|
||||
r := s.RootModule().Resources[n]
|
||||
a := r.Primary.Attributes
|
||||
|
||||
var (
|
||||
cidrBlockSize int
|
||||
createDate time.Time
|
||||
err error
|
||||
syncToken int
|
||||
)
|
||||
|
||||
if cidrBlockSize, err = strconv.Atoi(a["cidr_blocks.#"]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if cidrBlockSize < 10 {
|
||||
return fmt.Errorf("cidr_blocks for eu-west-1 seem suspiciously low: %d", cidrBlockSize)
|
||||
}
|
||||
|
||||
if createDate, err = time.Parse("2006-01-02-15-04-05", a["create_date"]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if syncToken, err = strconv.Atoi(a["sync_token"]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if syncToken != int(createDate.Unix()) {
|
||||
return fmt.Errorf("sync_token %d does not match create_date %s", syncToken, createDate)
|
||||
}
|
||||
|
||||
var cidrBlocks sort.StringSlice = make([]string, cidrBlockSize)
|
||||
|
||||
for i := range make([]string, cidrBlockSize) {
|
||||
|
||||
block := a[fmt.Sprintf("cidr_blocks.%d", i)]
|
||||
|
||||
if _, _, err := net.ParseCIDR(block); err != nil {
|
||||
return fmt.Errorf("malformed CIDR block %s: %s", block, err)
|
||||
}
|
||||
|
||||
cidrBlocks[i] = block
|
||||
|
||||
}
|
||||
|
||||
if !sort.IsSorted(cidrBlocks) {
|
||||
return fmt.Errorf("unexpected order of cidr_blocks: %s", cidrBlocks)
|
||||
}
|
||||
|
||||
var (
|
||||
regionMember = regexp.MustCompile(`regions\.\d+`)
|
||||
regions, services int
|
||||
serviceMember = regexp.MustCompile(`services\.\d+`)
|
||||
)
|
||||
|
||||
for k, v := range a {
|
||||
|
||||
if regionMember.MatchString(k) {
|
||||
|
||||
if !(v == "eu-west-1" || v == "EU-central-1") {
|
||||
return fmt.Errorf("unexpected region %s", v)
|
||||
}
|
||||
|
||||
regions = regions + 1
|
||||
|
||||
}
|
||||
|
||||
if serviceMember.MatchString(k) {
|
||||
|
||||
if v != "EC2" {
|
||||
return fmt.Errorf("unexpected service %s", v)
|
||||
}
|
||||
|
||||
services = services + 1
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if regions != 2 {
|
||||
return fmt.Errorf("unexpected number of regions: %d", regions)
|
||||
}
|
||||
|
||||
if services != 1 {
|
||||
return fmt.Errorf("unexpected number of services: %d", services)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Filter on two regions (one deliberately mixed-case) and one service.
const testAccAWSIPRangesConfig = `
data "aws_ip_ranges" "some" {
	regions = [ "eu-west-1", "EU-central-1" ]
	services = [ "EC2" ]
}
`
|
|
@ -0,0 +1,76 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsPrefixList() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsPrefixListRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"prefix_list_id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
// Computed values.
|
||||
"id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
"cidr_blocks": &schema.Schema{
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Schema{Type: schema.TypeString},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsPrefixListRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
|
||||
req := &ec2.DescribePrefixListsInput{}
|
||||
|
||||
if prefixListID := d.Get("prefix_list_id"); prefixListID != "" {
|
||||
req.PrefixListIds = aws.StringSlice([]string{prefixListID.(string)})
|
||||
}
|
||||
req.Filters = buildEC2AttributeFilterList(
|
||||
map[string]string{
|
||||
"prefix-list-name": d.Get("name").(string),
|
||||
},
|
||||
)
|
||||
|
||||
log.Printf("[DEBUG] DescribePrefixLists %s\n", req)
|
||||
resp, err := conn.DescribePrefixLists(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp == nil || len(resp.PrefixLists) == 0 {
|
||||
return fmt.Errorf("no matching prefix list found; the prefix list ID or name may be invalid or not exist in the current region")
|
||||
}
|
||||
|
||||
pl := resp.PrefixLists[0]
|
||||
|
||||
d.SetId(*pl.PrefixListId)
|
||||
d.Set("id", pl.PrefixListId)
|
||||
d.Set("name", pl.PrefixListName)
|
||||
|
||||
cidrs := make([]string, len(pl.Cidrs))
|
||||
for i, v := range pl.Cidrs {
|
||||
cidrs[i] = *v
|
||||
}
|
||||
d.Set("cidr_blocks", cidrs)
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsPrefixList(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsPrefixListConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsPrefixListCheck("data.aws_prefix_list.s3_by_id"),
|
||||
testAccDataSourceAwsPrefixListCheck("data.aws_prefix_list.s3_by_name"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsPrefixListCheck(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["name"] != "com.amazonaws.us-west-2.s3" {
|
||||
return fmt.Errorf("bad name %s", attr["name"])
|
||||
}
|
||||
if attr["id"] != "pl-68a54001" {
|
||||
return fmt.Errorf("bad id %s", attr["id"])
|
||||
}
|
||||
|
||||
var (
|
||||
cidrBlockSize int
|
||||
err error
|
||||
)
|
||||
|
||||
if cidrBlockSize, err = strconv.Atoi(attr["cidr_blocks.#"]); err != nil {
|
||||
return err
|
||||
}
|
||||
if cidrBlockSize < 1 {
|
||||
return fmt.Errorf("cidr_blocks seem suspiciously low: %d", cidrBlockSize)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Two lookups of the same prefix list: one by ID, one by name.
const testAccDataSourceAwsPrefixListConfig = `
provider "aws" {
	region = "us-west-2"
}

data "aws_prefix_list" "s3_by_id" {
	prefix_list_id = "pl-68a54001"
}

data "aws_prefix_list" "s3_by_name" {
	name = "com.amazonaws.us-west-2.s3"
}
`
|
|
@ -0,0 +1,49 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
// redshiftServiceAccountPerRegionMap maps each AWS region to the account
// ID Redshift uses when writing audit logs to S3; bucket policies for
// audit logging must grant this account access.
// See http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging
var redshiftServiceAccountPerRegionMap = map[string]string{
	"us-east-1":      "193672423079",
	"us-east-2":      "391106570357",
	"us-west-1":      "262260360010",
	"us-west-2":      "902366379725",
	"ap-south-1":     "865932855811",
	"ap-northeast-2": "760740231472",
	"ap-southeast-1": "361669875840",
	"ap-southeast-2": "762762565011",
	"ap-northeast-1": "404641285394",
	"eu-central-1":   "053454850223",
	"eu-west-1":      "210876761215",
}
|
||||
|
||||
func dataSourceAwsRedshiftServiceAccount() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsRedshiftServiceAccountRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"region": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsRedshiftServiceAccountRead(d *schema.ResourceData, meta interface{}) error {
|
||||
region := meta.(*AWSClient).region
|
||||
if v, ok := d.GetOk("region"); ok {
|
||||
region = v.(string)
|
||||
}
|
||||
|
||||
if accid, ok := redshiftServiceAccountPerRegionMap[region]; ok {
|
||||
d.SetId(accid)
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("Unknown region (%q)", region)
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSRedshiftServiceAccount_basic(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsRedshiftServiceAccountConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_redshift_service_account.main", "id", "902366379725"),
|
||||
),
|
||||
},
|
||||
resource.TestStep{
|
||||
Config: testAccCheckAwsRedshiftServiceAccountExplicitRegionConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
resource.TestCheckResourceAttr("data.aws_redshift_service_account.regional", "id", "210876761215"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Configuration relying on the provider's default region.
const testAccCheckAwsRedshiftServiceAccountConfig = `
data "aws_redshift_service_account" "main" { }
`

// Configuration overriding the region explicitly.
const testAccCheckAwsRedshiftServiceAccountExplicitRegionConfig = `
data "aws_redshift_service_account" "regional" {
	region = "eu-west-1"
}
`
|
|
@ -0,0 +1,84 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsRegion() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsRegionRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"current": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"endpoint": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsRegionRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
currentRegion := meta.(*AWSClient).region
|
||||
|
||||
req := &ec2.DescribeRegionsInput{}
|
||||
|
||||
req.RegionNames = make([]*string, 0, 2)
|
||||
if name := d.Get("name").(string); name != "" {
|
||||
req.RegionNames = append(req.RegionNames, aws.String(name))
|
||||
}
|
||||
|
||||
if d.Get("current").(bool) {
|
||||
req.RegionNames = append(req.RegionNames, aws.String(currentRegion))
|
||||
}
|
||||
|
||||
req.Filters = buildEC2AttributeFilterList(
|
||||
map[string]string{
|
||||
"endpoint": d.Get("endpoint").(string),
|
||||
},
|
||||
)
|
||||
if len(req.Filters) == 0 {
|
||||
// Don't send an empty filters list; the EC2 API won't accept it.
|
||||
req.Filters = nil
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] DescribeRegions %s\n", req)
|
||||
resp, err := conn.DescribeRegions(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp == nil || len(resp.Regions) == 0 {
|
||||
return fmt.Errorf("no matching regions found")
|
||||
}
|
||||
if len(resp.Regions) > 1 {
|
||||
return fmt.Errorf("multiple regions matched; use additional constraints to reduce matches to a single region")
|
||||
}
|
||||
|
||||
region := resp.Regions[0]
|
||||
|
||||
d.SetId(*region.RegionName)
|
||||
d.Set("id", region.RegionName)
|
||||
d.Set("name", region.RegionName)
|
||||
d.Set("endpoint", region.Endpoint)
|
||||
d.Set("current", *region.RegionName == currentRegion)
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsRegion(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsRegionConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsRegionCheck("data.aws_region.by_name_current", "us-west-2", "true"),
|
||||
testAccDataSourceAwsRegionCheck("data.aws_region.by_name_other", "us-west-1", "false"),
|
||||
testAccDataSourceAwsRegionCheck("data.aws_region.by_current", "us-west-2", "true"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsRegionCheck(name, region, current string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["name"] != region {
|
||||
return fmt.Errorf("bad name %s", attr["name"])
|
||||
}
|
||||
if attr["current"] != current {
|
||||
return fmt.Errorf("bad current %s; want %s", attr["current"], current)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Three lookups: current region by name, a different region by name, and
// the current region via the "current" flag.
const testAccDataSourceAwsRegionConfig = `
provider "aws" {
	region = "us-west-2"
}

data "aws_region" "by_name_current" {
	name = "us-west-2"
}

data "aws_region" "by_name_other" {
	name = "us-west-1"
}

data "aws_region" "by_current" {
	current = true
}
`
|
|
@ -0,0 +1,205 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsRouteTable() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsRouteTableRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"subnet_id": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
"vpc_id": {
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
"filter": ec2CustomFiltersSchema(),
|
||||
"tags": tagsSchemaComputed(),
|
||||
"routes": {
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"cidr_block": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"gateway_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"instance_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"nat_gateway_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"vpc_peering_connection_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"network_interface_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"associations": {
|
||||
Type: schema.TypeList,
|
||||
Computed: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"route_table_association_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"route_table_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"subnet_id": {
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"main": {
|
||||
Type: schema.TypeBool,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsRouteTableRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
req := &ec2.DescribeRouteTablesInput{}
|
||||
vpcId, vpcIdOk := d.GetOk("vpc_id")
|
||||
subnetId, subnetIdOk := d.GetOk("subnet_id")
|
||||
tags, tagsOk := d.GetOk("tags")
|
||||
filter, filterOk := d.GetOk("filter")
|
||||
|
||||
if !vpcIdOk && !subnetIdOk && !tagsOk && !filterOk {
|
||||
return fmt.Errorf("One of vpc_id, subnet_id, filters, or tags must be assigned")
|
||||
}
|
||||
req.Filters = buildEC2AttributeFilterList(
|
||||
map[string]string{
|
||||
"vpc-id": vpcId.(string),
|
||||
"association.subnet-id": subnetId.(string),
|
||||
},
|
||||
)
|
||||
req.Filters = append(req.Filters, buildEC2TagFilterList(
|
||||
tagsFromMap(tags.(map[string]interface{})),
|
||||
)...)
|
||||
req.Filters = append(req.Filters, buildEC2CustomFilterList(
|
||||
filter.(*schema.Set),
|
||||
)...)
|
||||
|
||||
log.Printf("[DEBUG] Describe Route Tables %v\n", req)
|
||||
resp, err := conn.DescribeRouteTables(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp == nil || len(resp.RouteTables) == 0 {
|
||||
return fmt.Errorf("Your query returned no results. Please change your search criteria and try again.")
|
||||
}
|
||||
if len(resp.RouteTables) > 1 {
|
||||
return fmt.Errorf("Multiple Route Table matched; use additional constraints to reduce matches to a single Route Table")
|
||||
}
|
||||
|
||||
rt := resp.RouteTables[0]
|
||||
|
||||
d.SetId(*rt.RouteTableId)
|
||||
d.Set("vpc_id", rt.VpcId)
|
||||
d.Set("tags", tagsToMap(rt.Tags))
|
||||
if err := d.Set("routes", dataSourceRoutesRead(rt.Routes)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := d.Set("associations", dataSourceAssociationsRead(rt.Associations)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func dataSourceRoutesRead(ec2Routes []*ec2.Route) []map[string]interface{} {
|
||||
routes := make([]map[string]interface{}, 0, len(ec2Routes))
|
||||
// Loop through the routes and add them to the set
|
||||
for _, r := range ec2Routes {
|
||||
if r.GatewayId != nil && *r.GatewayId == "local" {
|
||||
continue
|
||||
}
|
||||
|
||||
if r.Origin != nil && *r.Origin == "EnableVgwRoutePropagation" {
|
||||
continue
|
||||
}
|
||||
|
||||
if r.DestinationPrefixListId != nil {
|
||||
// Skipping because VPC endpoint routes are handled separately
|
||||
// See aws_vpc_endpoint
|
||||
continue
|
||||
}
|
||||
|
||||
m := make(map[string]interface{})
|
||||
|
||||
if r.DestinationCidrBlock != nil {
|
||||
m["cidr_block"] = *r.DestinationCidrBlock
|
||||
}
|
||||
if r.GatewayId != nil {
|
||||
m["gateway_id"] = *r.GatewayId
|
||||
}
|
||||
if r.NatGatewayId != nil {
|
||||
m["nat_gateway_id"] = *r.NatGatewayId
|
||||
}
|
||||
if r.InstanceId != nil {
|
||||
m["instance_id"] = *r.InstanceId
|
||||
}
|
||||
if r.VpcPeeringConnectionId != nil {
|
||||
m["vpc_peering_connection_id"] = *r.VpcPeeringConnectionId
|
||||
}
|
||||
if r.NetworkInterfaceId != nil {
|
||||
m["network_interface_id"] = *r.NetworkInterfaceId
|
||||
}
|
||||
|
||||
routes = append(routes, m)
|
||||
}
|
||||
return routes
|
||||
}
|
||||
|
||||
func dataSourceAssociationsRead(ec2Assocations []*ec2.RouteTableAssociation) []map[string]interface{} {
|
||||
associations := make([]map[string]interface{}, 0, len(ec2Assocations))
|
||||
// Loop through the routes and add them to the set
|
||||
for _, a := range ec2Assocations {
|
||||
|
||||
m := make(map[string]interface{})
|
||||
m["route_table_id"] = *a.RouteTableId
|
||||
m["route_table_association_id"] = *a.RouteTableAssociationId
|
||||
m["subnet_id"] = *a.SubnetId
|
||||
m["main"] = *a.Main
|
||||
associations = append(associations, m)
|
||||
}
|
||||
return associations
|
||||
}
|
|
@ -0,0 +1,132 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsRouteTable(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsRouteTableGroupConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsRouteTableCheck("data.aws_route_table.by_tag"),
|
||||
testAccDataSourceAwsRouteTableCheck("data.aws_route_table.by_filter"),
|
||||
testAccDataSourceAwsRouteTableCheck("data.aws_route_table.by_subnet"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsRouteTableCheck(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
rts, ok := s.RootModule().Resources["aws_route_table.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_route_table.test in state")
|
||||
}
|
||||
vpcRs, ok := s.RootModule().Resources["aws_vpc.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_vpc.test in state")
|
||||
}
|
||||
subnetRs, ok := s.RootModule().Resources["aws_subnet.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_subnet.test in state")
|
||||
}
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["id"] != rts.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"id is %s; want %s",
|
||||
attr["id"],
|
||||
rts.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["vpc_id"] != vpcRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"vpc_id is %s; want %s",
|
||||
attr["vpc_id"],
|
||||
vpcRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["tags.Name"] != "terraform-testacc-routetable-data-source" {
|
||||
return fmt.Errorf("bad Name tag %s", attr["tags.Name"])
|
||||
}
|
||||
if attr["associations.0.subnet_id"] != subnetRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"subnet_id is %v; want %s",
|
||||
attr["associations.0.subnet_id"],
|
||||
subnetRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// testAccDataSourceAwsRouteTableGroupConfig stands up a VPC, a subnet, a
// route table and an association between them, then reads the route table
// back through the data source three ways: by association filter, by Name
// tag, and by associated subnet. All three data sources depend on the
// association so the lookups only run once it exists.
const testAccDataSourceAwsRouteTableGroupConfig = `
provider "aws" {
  region = "eu-central-1"
}
resource "aws_vpc" "test" {
  cidr_block = "172.16.0.0/16"

  tags {
    Name = "terraform-testacc-data-source"
  }
}

resource "aws_subnet" "test" {
  cidr_block = "172.16.0.0/24"
  vpc_id     = "${aws_vpc.test.id}"
  tags {
    Name = "terraform-testacc-data-source"
  }
}

resource "aws_route_table" "test" {
  vpc_id = "${aws_vpc.test.id}"
  tags {
    Name = "terraform-testacc-routetable-data-source"
  }
}

resource "aws_route_table_association" "a" {
  subnet_id      = "${aws_subnet.test.id}"
  route_table_id = "${aws_route_table.test.id}"
}

data "aws_route_table" "by_filter" {
  filter {
    name   = "association.route-table-association-id"
    values = ["${aws_route_table_association.a.id}"]
  }
  depends_on = ["aws_route_table_association.a"]
}

data "aws_route_table" "by_tag" {
  tags {
    Name = "${aws_route_table.test.tags["Name"]}"
  }
  depends_on = ["aws_route_table_association.a"]
}
data "aws_route_table" "by_subnet" {
  subnet_id  = "${aws_subnet.test.id}"
  depends_on = ["aws_route_table_association.a"]
}

`
|
|
@ -155,10 +155,16 @@ func dataSourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) e
|
|||
d.Set("metadata", pointersMapToStringList(out.Metadata))
|
||||
d.Set("server_side_encryption", out.ServerSideEncryption)
|
||||
d.Set("sse_kms_key_id", out.SSEKMSKeyId)
|
||||
d.Set("storage_class", out.StorageClass)
|
||||
d.Set("version_id", out.VersionId)
|
||||
d.Set("website_redirect_location", out.WebsiteRedirectLocation)
|
||||
|
||||
// The "STANDARD" (which is also the default) storage
|
||||
// class when set would not be included in the results.
|
||||
d.Set("storage_class", s3.StorageClassStandard)
|
||||
if out.StorageClass != nil {
|
||||
d.Set("storage_class", out.StorageClass)
|
||||
}
|
||||
|
||||
if isContentTypeAllowed(out.ContentType) {
|
||||
input := s3.GetObjectInput{
|
||||
Bucket: aws.String(bucket),
|
||||
|
|
|
@ -154,12 +154,12 @@ func TestAccDataSourceAWSS3BucketObject_allParams(t *testing.T) {
|
|||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "server_side_encryption", ""),
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "sse_kms_key_id", ""),
|
||||
// Supported, but difficult to reproduce in short testing time
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "storage_class", ""),
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "storage_class", "STANDARD"),
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "expiration", ""),
|
||||
// Currently unsupported in aws_s3_bucket_object resource
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "expires", ""),
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "website_redirect_location", ""),
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "metadata.#", "0"),
|
||||
resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "metadata.%", "0"),
|
||||
),
|
||||
},
|
||||
},
|
|
@ -0,0 +1,86 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsSecurityGroup() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsSecurityGroupRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"vpc_id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
"filter": ec2CustomFiltersSchema(),
|
||||
|
||||
"id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
"tags": tagsSchemaComputed(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func dataSourceAwsSecurityGroupRead(d *schema.ResourceData, meta interface{}) error {
|
||||
conn := meta.(*AWSClient).ec2conn
|
||||
req := &ec2.DescribeSecurityGroupsInput{}
|
||||
|
||||
if id, idExists := d.GetOk("id"); idExists {
|
||||
req.GroupIds = []*string{aws.String(id.(string))}
|
||||
}
|
||||
|
||||
req.Filters = buildEC2AttributeFilterList(
|
||||
map[string]string{
|
||||
"group-name": d.Get("name").(string),
|
||||
"vpc-id": d.Get("vpc_id").(string),
|
||||
},
|
||||
)
|
||||
req.Filters = append(req.Filters, buildEC2TagFilterList(
|
||||
tagsFromMap(d.Get("tags").(map[string]interface{})),
|
||||
)...)
|
||||
req.Filters = append(req.Filters, buildEC2CustomFilterList(
|
||||
d.Get("filter").(*schema.Set),
|
||||
)...)
|
||||
if len(req.Filters) == 0 {
|
||||
// Don't send an empty filters list; the EC2 API won't accept it.
|
||||
req.Filters = nil
|
||||
}
|
||||
|
||||
log.Printf("[DEBUG] Describe Security Groups %v\n", req)
|
||||
resp, err := conn.DescribeSecurityGroups(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp == nil || len(resp.SecurityGroups) == 0 {
|
||||
return fmt.Errorf("no matching SecurityGroup found")
|
||||
}
|
||||
if len(resp.SecurityGroups) > 1 {
|
||||
return fmt.Errorf("multiple Security Groups matched; use additional constraints to reduce matches to a single Security Group")
|
||||
}
|
||||
|
||||
sg := resp.SecurityGroups[0]
|
||||
|
||||
d.SetId(*sg.GroupId)
|
||||
d.Set("id", sg.VpcId)
|
||||
d.Set("name", sg.GroupName)
|
||||
d.Set("description", sg.Description)
|
||||
d.Set("vpc_id", sg.VpcId)
|
||||
d.Set("tags", tagsToMap(sg.Tags))
|
||||
|
||||
return nil
|
||||
}
|
|
@ -0,0 +1,141 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsSecurityGroup(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsSecurityGroupConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsSecurityGroupCheck("data.aws_security_group.by_id"),
|
||||
testAccDataSourceAwsSecurityGroupCheck("data.aws_security_group.by_tag"),
|
||||
testAccDataSourceAwsSecurityGroupCheck("data.aws_security_group.by_filter"),
|
||||
testAccDataSourceAwsSecurityGroupCheck("data.aws_security_group.by_name"),
|
||||
testAccDataSourceAwsSecurityGroupCheckDefault("data.aws_security_group.default_by_name"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsSecurityGroupCheck(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
SGRs, ok := s.RootModule().Resources["aws_security_group.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_security_group.test in state")
|
||||
}
|
||||
vpcRs, ok := s.RootModule().Resources["aws_vpc.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_vpc.test in state")
|
||||
}
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["id"] != SGRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"id is %s; want %s",
|
||||
attr["id"],
|
||||
SGRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["vpc_id"] != vpcRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"vpc_id is %s; want %s",
|
||||
attr["vpc_id"],
|
||||
vpcRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["tags.Name"] != "terraform-testacc-security-group-data-source" {
|
||||
return fmt.Errorf("bad Name tag %s", attr["tags.Name"])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsSecurityGroupCheckDefault(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
vpcRs, ok := s.RootModule().Resources["aws_vpc.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_vpc.test in state")
|
||||
}
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["id"] != vpcRs.Primary.Attributes["default_security_group_id"] {
|
||||
return fmt.Errorf(
|
||||
"id is %s; want %s",
|
||||
attr["id"],
|
||||
vpcRs.Primary.Attributes["default_security_group_id"],
|
||||
)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// testAccDataSourceAwsSecurityGroupConfig stands up a VPC and a tagged
// security group, then reads the group back through the data source five
// ways: by id, by name, by Name tag, by group-name filter, and (for the
// default-group case) by the name "default" scoped to the VPC.
const testAccDataSourceAwsSecurityGroupConfig = `
provider "aws" {
  region = "eu-west-1"
}
resource "aws_vpc" "test" {
  cidr_block = "172.16.0.0/16"

  tags {
    Name = "terraform-testacc-subnet-data-source"
  }
}

resource "aws_security_group" "test" {
  vpc_id = "${aws_vpc.test.id}"
  name   = "security-groupe-name-test"
  tags {
    Name = "terraform-testacc-security-group-data-source"
  }
}

data "aws_security_group" "by_id" {
  id = "${aws_security_group.test.id}"
}

data "aws_security_group" "by_name" {
  name = "${aws_security_group.test.name}"
}

data "aws_security_group" "default_by_name" {
  vpc_id = "${aws_vpc.test.id}"
  name   = "default"
}

data "aws_security_group" "by_tag" {
  tags {
    Name = "${aws_security_group.test.tags["Name"]}"
  }
}

data "aws_security_group" "by_filter" {
  filter {
    name   = "group-name"
    values = ["${aws_security_group.test.name}"]
  }
}
`
|
|
@ -0,0 +1,123 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsSubnet() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsSubnetRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"availability_zone": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"cidr_block": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"default_for_az": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"filter": ec2CustomFiltersSchema(),
|
||||
|
||||
"id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"state": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"tags": tagsSchemaComputed(),
|
||||
|
||||
"vpc_id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// dataSourceAwsSubnetRead looks up exactly one subnet matching the
// configured constraints (id, attribute values, tags and custom filters)
// and mirrors its attributes into state. It is an error for zero or more
// than one subnet to match.
func dataSourceAwsSubnetRead(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).ec2conn

	req := &ec2.DescribeSubnetsInput{}

	// An explicit "id" constrains the query to that single subnet.
	if id := d.Get("id"); id != "" {
		req.SubnetIds = []*string{aws.String(id.(string))}
	}

	// We specify default_for_az as boolean, but EC2 filters want
	// it to be serialized as a string. Note that setting it to
	// "false" here does not actually filter by it *not* being
	// the default, because Terraform can't distinguish between
	// "false" and "not set".
	defaultForAzStr := ""
	if d.Get("default_for_az").(bool) {
		defaultForAzStr = "true"
	}

	// NOTE(review): these are legacy camelCase EC2 filter names
	// (e.g. "availabilityZone") mixed with a dash-separated one
	// ("vpc-id"); presumably DescribeSubnets accepts both forms —
	// confirm before normalizing.
	req.Filters = buildEC2AttributeFilterList(
		map[string]string{
			"availabilityZone": d.Get("availability_zone").(string),
			"cidrBlock":        d.Get("cidr_block").(string),
			"defaultForAz":     defaultForAzStr,
			"state":            d.Get("state").(string),
			"vpc-id":           d.Get("vpc_id").(string),
		},
	)
	// Tag constraints and free-form custom filters are appended to the
	// attribute filters; all are ANDed together by the EC2 API.
	req.Filters = append(req.Filters, buildEC2TagFilterList(
		tagsFromMap(d.Get("tags").(map[string]interface{})),
	)...)
	req.Filters = append(req.Filters, buildEC2CustomFilterList(
		d.Get("filter").(*schema.Set),
	)...)
	if len(req.Filters) == 0 {
		// Don't send an empty filters list; the EC2 API won't accept it.
		req.Filters = nil
	}

	log.Printf("[DEBUG] DescribeSubnets %s\n", req)
	resp, err := conn.DescribeSubnets(req)
	if err != nil {
		return err
	}
	// A data source must resolve to exactly one object.
	if resp == nil || len(resp.Subnets) == 0 {
		return fmt.Errorf("no matching subnet found")
	}
	if len(resp.Subnets) > 1 {
		return fmt.Errorf("multiple subnets matched; use additional constraints to reduce matches to a single subnet")
	}

	subnet := resp.Subnets[0]

	// Mirror every queryable attribute of the matched subnet into state.
	d.SetId(*subnet.SubnetId)
	d.Set("id", subnet.SubnetId)
	d.Set("vpc_id", subnet.VpcId)
	d.Set("availability_zone", subnet.AvailabilityZone)
	d.Set("cidr_block", subnet.CidrBlock)
	d.Set("default_for_az", subnet.DefaultForAz)
	d.Set("state", subnet.State)
	d.Set("tags", tagsToMap(subnet.Tags))

	return nil
}
|
|
@ -0,0 +1,125 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsSubnet(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsSubnetConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsSubnetCheck("data.aws_subnet.by_id"),
|
||||
testAccDataSourceAwsSubnetCheck("data.aws_subnet.by_cidr"),
|
||||
testAccDataSourceAwsSubnetCheck("data.aws_subnet.by_tag"),
|
||||
testAccDataSourceAwsSubnetCheck("data.aws_subnet.by_vpc"),
|
||||
testAccDataSourceAwsSubnetCheck("data.aws_subnet.by_filter"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsSubnetCheck(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
vpcRs, ok := s.RootModule().Resources["aws_vpc.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_vpc.test in state")
|
||||
}
|
||||
subnetRs, ok := s.RootModule().Resources["aws_subnet.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_subnet.test in state")
|
||||
}
|
||||
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["id"] != subnetRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"id is %s; want %s",
|
||||
attr["id"],
|
||||
subnetRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["vpc_id"] != vpcRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"vpc_id is %s; want %s",
|
||||
attr["vpc_id"],
|
||||
vpcRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["cidr_block"] != "172.16.123.0/24" {
|
||||
return fmt.Errorf("bad cidr_block %s", attr["cidr_block"])
|
||||
}
|
||||
if attr["availability_zone"] != "us-west-2a" {
|
||||
return fmt.Errorf("bad availability_zone %s", attr["availability_zone"])
|
||||
}
|
||||
if attr["tags.Name"] != "terraform-testacc-subnet-data-source" {
|
||||
return fmt.Errorf("bad Name tag %s", attr["tags.Name"])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// testAccDataSourceAwsSubnetConfig stands up a VPC and a tagged subnet in
// us-west-2a, then reads the subnet back through the data source five
// ways: by id, by CIDR block, by Name tag, by VPC and by vpc-id filter.
const testAccDataSourceAwsSubnetConfig = `
provider "aws" {
  region = "us-west-2"
}

resource "aws_vpc" "test" {
  cidr_block = "172.16.0.0/16"

  tags {
    Name = "terraform-testacc-subnet-data-source"
  }
}

resource "aws_subnet" "test" {
  vpc_id            = "${aws_vpc.test.id}"
  cidr_block        = "172.16.123.0/24"
  availability_zone = "us-west-2a"

  tags {
    Name = "terraform-testacc-subnet-data-source"
  }
}

data "aws_subnet" "by_id" {
  id = "${aws_subnet.test.id}"
}

data "aws_subnet" "by_cidr" {
  cidr_block = "${aws_subnet.test.cidr_block}"
}

data "aws_subnet" "by_tag" {
  tags {
    Name = "${aws_subnet.test.tags["Name"]}"
  }
}

data "aws_subnet" "by_vpc" {
  vpc_id = "${aws_subnet.test.vpc_id}"
}

data "aws_subnet" "by_filter" {
  filter {
    name   = "vpc-id"
    values = ["${aws_subnet.test.vpc_id}"]
  }
}
`
|
|
@ -0,0 +1,121 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func dataSourceAwsVpc() *schema.Resource {
|
||||
return &schema.Resource{
|
||||
Read: dataSourceAwsVpcRead,
|
||||
|
||||
Schema: map[string]*schema.Schema{
|
||||
"cidr_block": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"dhcp_options_id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"default": &schema.Schema{
|
||||
Type: schema.TypeBool,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"filter": ec2CustomFiltersSchema(),
|
||||
|
||||
"id": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"instance_tenancy": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"state": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Optional: true,
|
||||
Computed: true,
|
||||
},
|
||||
|
||||
"tags": tagsSchemaComputed(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// dataSourceAwsVpcRead looks up exactly one VPC matching the configured
// constraints (id, attribute values, tags and custom filters) and mirrors
// its attributes into state. It is an error for zero or more than one VPC
// to match.
func dataSourceAwsVpcRead(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).ec2conn

	req := &ec2.DescribeVpcsInput{}

	// An explicit "id" constrains the query to that single VPC.
	if id := d.Get("id"); id != "" {
		req.VpcIds = []*string{aws.String(id.(string))}
	}

	// We specify "default" as boolean, but EC2 filters want
	// it to be serialized as a string. Note that setting it to
	// "false" here does not actually filter by it *not* being
	// the default, because Terraform can't distinguish between
	// "false" and "not set".
	isDefaultStr := ""
	if d.Get("default").(bool) {
		isDefaultStr = "true"
	}

	// NOTE(review): "cidr" and "isDefault" are legacy EC2 filter names;
	// presumably DescribeVpcs accepts them alongside the dash-separated
	// forms — confirm before normalizing.
	req.Filters = buildEC2AttributeFilterList(
		map[string]string{
			"cidr":            d.Get("cidr_block").(string),
			"dhcp-options-id": d.Get("dhcp_options_id").(string),
			"isDefault":       isDefaultStr,
			"state":           d.Get("state").(string),
		},
	)
	// Tag constraints and free-form custom filters are appended to the
	// attribute filters; all are ANDed together by the EC2 API.
	req.Filters = append(req.Filters, buildEC2TagFilterList(
		tagsFromMap(d.Get("tags").(map[string]interface{})),
	)...)
	req.Filters = append(req.Filters, buildEC2CustomFilterList(
		d.Get("filter").(*schema.Set),
	)...)
	if len(req.Filters) == 0 {
		// Don't send an empty filters list; the EC2 API won't accept it.
		req.Filters = nil
	}

	log.Printf("[DEBUG] DescribeVpcs %s\n", req)
	resp, err := conn.DescribeVpcs(req)
	if err != nil {
		return err
	}
	// A data source must resolve to exactly one object.
	if resp == nil || len(resp.Vpcs) == 0 {
		return fmt.Errorf("no matching VPC found")
	}
	if len(resp.Vpcs) > 1 {
		return fmt.Errorf("multiple VPCs matched; use additional constraints to reduce matches to a single VPC")
	}

	vpc := resp.Vpcs[0]

	// Mirror every queryable attribute of the matched VPC into state.
	d.SetId(*vpc.VpcId)
	d.Set("id", vpc.VpcId)
	d.Set("cidr_block", vpc.CidrBlock)
	d.Set("dhcp_options_id", vpc.DhcpOptionsId)
	d.Set("instance_tenancy", vpc.InstanceTenancy)
	d.Set("default", vpc.IsDefault)
	d.Set("state", vpc.State)
	d.Set("tags", tagsToMap(vpc.Tags))

	return nil
}
|
|
@ -0,0 +1,95 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
"github.com/hashicorp/terraform/terraform"
|
||||
)
|
||||
|
||||
func TestAccDataSourceAwsVpc(t *testing.T) {
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccDataSourceAwsVpcConfig,
|
||||
Check: resource.ComposeTestCheckFunc(
|
||||
testAccDataSourceAwsVpcCheck("data.aws_vpc.by_id"),
|
||||
testAccDataSourceAwsVpcCheck("data.aws_vpc.by_cidr"),
|
||||
testAccDataSourceAwsVpcCheck("data.aws_vpc.by_tag"),
|
||||
testAccDataSourceAwsVpcCheck("data.aws_vpc.by_filter"),
|
||||
),
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func testAccDataSourceAwsVpcCheck(name string) resource.TestCheckFunc {
|
||||
return func(s *terraform.State) error {
|
||||
rs, ok := s.RootModule().Resources[name]
|
||||
if !ok {
|
||||
return fmt.Errorf("root module has no resource called %s", name)
|
||||
}
|
||||
|
||||
vpcRs, ok := s.RootModule().Resources["aws_vpc.test"]
|
||||
if !ok {
|
||||
return fmt.Errorf("can't find aws_vpc.test in state")
|
||||
}
|
||||
|
||||
attr := rs.Primary.Attributes
|
||||
|
||||
if attr["id"] != vpcRs.Primary.Attributes["id"] {
|
||||
return fmt.Errorf(
|
||||
"id is %s; want %s",
|
||||
attr["id"],
|
||||
vpcRs.Primary.Attributes["id"],
|
||||
)
|
||||
}
|
||||
|
||||
if attr["cidr_block"] != "172.16.0.0/16" {
|
||||
return fmt.Errorf("bad cidr_block %s", attr["cidr_block"])
|
||||
}
|
||||
if attr["tags.Name"] != "terraform-testacc-vpc-data-source" {
|
||||
return fmt.Errorf("bad Name tag %s", attr["tags.Name"])
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// testAccDataSourceAwsVpcConfig stands up a tagged VPC, then reads it back
// through the data source four ways: by id, by CIDR block, by Name tag and
// by cidr filter.
const testAccDataSourceAwsVpcConfig = `
provider "aws" {
  region = "us-west-2"
}

resource "aws_vpc" "test" {
  cidr_block = "172.16.0.0/16"

  tags {
    Name = "terraform-testacc-vpc-data-source"
  }
}

data "aws_vpc" "by_id" {
  id = "${aws_vpc.test.id}"
}

data "aws_vpc" "by_cidr" {
  cidr_block = "${aws_vpc.test.cidr_block}"
}

data "aws_vpc" "by_tag" {
  tags {
    Name = "${aws_vpc.test.tags["Name"]}"
  }
}

data "aws_vpc" "by_filter" {
  filter {
    name   = "cidr"
    values = ["${aws_vpc.test.cidr_block}"]
  }
}
`
|
|
@ -0,0 +1,15 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
"github.com/jen20/awspolicyequivalence"
|
||||
)
|
||||
|
||||
func suppressEquivalentAwsPolicyDiffs(k, old, new string, d *schema.ResourceData) bool {
|
||||
equivalent, err := awspolicy.PoliciesAreEquivalent(old, new)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return equivalent
|
||||
}
|
|
@ -0,0 +1,163 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
// buildEC2AttributeFilterList takes a flat map of scalar attributes (most
|
||||
// likely values extracted from a *schema.ResourceData on an EC2-querying
|
||||
// data source) and produces a []*ec2.Filter representing an exact match
|
||||
// for each of the given non-empty attributes.
|
||||
//
|
||||
// The keys of the given attributes map are the attribute names expected
|
||||
// by the EC2 API, which are usually either in camelcase or with dash-separated
|
||||
// words. We conventionally map these to underscore-separated identifiers
|
||||
// with the same words when presenting these as data source query attributes
|
||||
// in Terraform.
|
||||
//
|
||||
// It's the callers responsibility to transform any non-string values into
|
||||
// the appropriate string serialization required by the AWS API when
|
||||
// encoding the given filter. Any attributes given with empty string values
|
||||
// are ignored, assuming that the user wishes to leave that attribute
|
||||
// unconstrained while filtering.
|
||||
//
|
||||
// The purpose of this function is to create values to pass in
|
||||
// for the "Filters" attribute on most of the "Describe..." API functions in
|
||||
// the EC2 API, to aid in the implementation of Terraform data sources that
|
||||
// retrieve data about EC2 objects.
|
||||
func buildEC2AttributeFilterList(attrs map[string]string) []*ec2.Filter {
|
||||
var filters []*ec2.Filter
|
||||
|
||||
// sort the filters by name to make the output deterministic
|
||||
var names []string
|
||||
for filterName := range attrs {
|
||||
names = append(names, filterName)
|
||||
}
|
||||
|
||||
sort.Strings(names)
|
||||
|
||||
for _, filterName := range names {
|
||||
value := attrs[filterName]
|
||||
if value == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
filters = append(filters, &ec2.Filter{
|
||||
Name: aws.String(filterName),
|
||||
Values: []*string{aws.String(value)},
|
||||
})
|
||||
}
|
||||
|
||||
return filters
|
||||
}
|
||||
|
||||
// buildEC2TagFilterList takes a []*ec2.Tag and produces a []*ec2.Filter that
|
||||
// represents exact matches for all of the tag key/value pairs given in
|
||||
// the tag set.
|
||||
//
|
||||
// The purpose of this function is to create values to pass in for
|
||||
// the "Filters" attribute on most of the "Describe..." API functions
|
||||
// in the EC2 API, to implement filtering by tag values e.g. in Terraform
|
||||
// data sources that retrieve data about EC2 objects.
|
||||
//
|
||||
// It is conventional for an EC2 data source to include an attribute called
|
||||
// "tags" which conforms to the schema returned by the tagsSchema() function.
|
||||
// The value of this can then be converted to a tags slice using tagsFromMap,
|
||||
// and the result finally passed in to this function.
|
||||
//
|
||||
// In Terraform configuration this would then look like this, to constrain
|
||||
// results by name:
|
||||
//
|
||||
// tags {
|
||||
// Name = "my-awesome-subnet"
|
||||
// }
|
||||
func buildEC2TagFilterList(tags []*ec2.Tag) []*ec2.Filter {
|
||||
filters := make([]*ec2.Filter, len(tags))
|
||||
|
||||
for i, tag := range tags {
|
||||
filters[i] = &ec2.Filter{
|
||||
Name: aws.String(fmt.Sprintf("tag:%s", *tag.Key)),
|
||||
Values: []*string{tag.Value},
|
||||
}
|
||||
}
|
||||
|
||||
return filters
|
||||
}
|
||||
|
||||
// ec2CustomFiltersSchema returns a *schema.Schema that represents
|
||||
// a set of custom filtering criteria that a user can specify as input
|
||||
// to a data source that wraps one of the many "Describe..." API calls
|
||||
// in the EC2 API.
|
||||
//
|
||||
// It is conventional for an attribute of this type to be included
|
||||
// as a top-level attribute called "filter". This is the "catch all" for
|
||||
// filter combinations that are not possible to express using scalar
|
||||
// attributes or tags. In Terraform configuration, the custom filter blocks
|
||||
// then look like this:
|
||||
//
|
||||
// filter {
|
||||
// name = "availabilityZone"
|
||||
// values = ["us-west-2a", "us-west-2b"]
|
||||
// }
|
||||
func ec2CustomFiltersSchema() *schema.Schema {
|
||||
return &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Optional: true,
|
||||
Elem: &schema.Resource{
|
||||
Schema: map[string]*schema.Schema{
|
||||
"name": &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
"values": &schema.Schema{
|
||||
Type: schema.TypeSet,
|
||||
Required: true,
|
||||
Elem: &schema.Schema{
|
||||
Type: schema.TypeString,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// buildEC2CustomFilterList takes the set value extracted from a schema
|
||||
// attribute conforming to the schema returned by ec2CustomFiltersSchema,
|
||||
// and transforms it into a []*ec2.Filter representing the same filter
|
||||
// expressions which is ready to pass into the "Filters" attribute on most
|
||||
// of the "Describe..." functions in the EC2 API.
|
||||
//
|
||||
// This function is intended only to be used in conjunction with
|
||||
// ec2CustomFitlersSchema. See the docs on that function for more details
|
||||
// on the configuration pattern this is intended to support.
|
||||
func buildEC2CustomFilterList(filterSet *schema.Set) []*ec2.Filter {
|
||||
if filterSet == nil {
|
||||
return []*ec2.Filter{}
|
||||
}
|
||||
|
||||
customFilters := filterSet.List()
|
||||
filters := make([]*ec2.Filter, len(customFilters))
|
||||
|
||||
for filterIdx, customFilterI := range customFilters {
|
||||
customFilterMapI := customFilterI.(map[string]interface{})
|
||||
name := customFilterMapI["name"].(string)
|
||||
valuesI := customFilterMapI["values"].(*schema.Set).List()
|
||||
values := make([]*string, len(valuesI))
|
||||
for valueIdx, valueI := range valuesI {
|
||||
values[valueIdx] = aws.String(valueI.(string))
|
||||
}
|
||||
|
||||
filters[filterIdx] = &ec2.Filter{
|
||||
Name: &name,
|
||||
Values: values,
|
||||
}
|
||||
}
|
||||
|
||||
return filters
|
||||
}
|
|
@ -0,0 +1,158 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func TestBuildEC2AttributeFilterList(t *testing.T) {
|
||||
type TestCase struct {
|
||||
Attrs map[string]string
|
||||
Expected []*ec2.Filter
|
||||
}
|
||||
testCases := []TestCase{
|
||||
{
|
||||
map[string]string{
|
||||
"foo": "bar",
|
||||
"baz": "boo",
|
||||
},
|
||||
[]*ec2.Filter{
|
||||
{
|
||||
Name: aws.String("baz"),
|
||||
Values: []*string{aws.String("boo")},
|
||||
},
|
||||
{
|
||||
Name: aws.String("foo"),
|
||||
Values: []*string{aws.String("bar")},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
map[string]string{
|
||||
"foo": "bar",
|
||||
"baz": "",
|
||||
},
|
||||
[]*ec2.Filter{
|
||||
{
|
||||
Name: aws.String("foo"),
|
||||
Values: []*string{aws.String("bar")},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for i, testCase := range testCases {
|
||||
result := buildEC2AttributeFilterList(testCase.Attrs)
|
||||
|
||||
if !reflect.DeepEqual(result, testCase.Expected) {
|
||||
t.Errorf(
|
||||
"test case %d: got %#v, but want %#v",
|
||||
i, result, testCase.Expected,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildEC2TagFilterList(t *testing.T) {
|
||||
type TestCase struct {
|
||||
Tags []*ec2.Tag
|
||||
Expected []*ec2.Filter
|
||||
}
|
||||
testCases := []TestCase{
|
||||
{
|
||||
[]*ec2.Tag{
|
||||
{
|
||||
Key: aws.String("foo"),
|
||||
Value: aws.String("bar"),
|
||||
},
|
||||
{
|
||||
Key: aws.String("baz"),
|
||||
Value: aws.String("boo"),
|
||||
},
|
||||
},
|
||||
[]*ec2.Filter{
|
||||
{
|
||||
Name: aws.String("tag:foo"),
|
||||
Values: []*string{aws.String("bar")},
|
||||
},
|
||||
{
|
||||
Name: aws.String("tag:baz"),
|
||||
Values: []*string{aws.String("boo")},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for i, testCase := range testCases {
|
||||
result := buildEC2TagFilterList(testCase.Tags)
|
||||
|
||||
if !reflect.DeepEqual(result, testCase.Expected) {
|
||||
t.Errorf(
|
||||
"test case %d: got %#v, but want %#v",
|
||||
i, result, testCase.Expected,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildEC2CustomFilterList(t *testing.T) {
|
||||
|
||||
// We need to get a set with the appropriate hash function,
|
||||
// so we'll use the schema to help us produce what would
|
||||
// be produced in the normal case.
|
||||
filtersSchema := ec2CustomFiltersSchema()
|
||||
|
||||
// The zero value of this schema will be an interface{}
|
||||
// referring to a new, empty *schema.Set with the
|
||||
// appropriate hash function configured.
|
||||
filters := filtersSchema.ZeroValue().(*schema.Set)
|
||||
|
||||
// We also need an appropriately-configured set for
|
||||
// the list of values.
|
||||
valuesSchema := filtersSchema.Elem.(*schema.Resource).Schema["values"]
|
||||
valuesSet := func(vals ...string) *schema.Set {
|
||||
ret := valuesSchema.ZeroValue().(*schema.Set)
|
||||
for _, val := range vals {
|
||||
ret.Add(val)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
filters.Add(map[string]interface{}{
|
||||
"name": "foo",
|
||||
"values": valuesSet("bar", "baz"),
|
||||
})
|
||||
filters.Add(map[string]interface{}{
|
||||
"name": "pizza",
|
||||
"values": valuesSet("cheese"),
|
||||
})
|
||||
|
||||
expected := []*ec2.Filter{
|
||||
// These are produced in the deterministic order guaranteed
|
||||
// by schema.Set.List(), which happens to produce them in
|
||||
// the following order for our current input. If this test
|
||||
// evolves with different input data in future then they
|
||||
// will likely be emitted in a different order, which is fine.
|
||||
{
|
||||
Name: aws.String("pizza"),
|
||||
Values: []*string{aws.String("cheese")},
|
||||
},
|
||||
{
|
||||
Name: aws.String("foo"),
|
||||
Values: []*string{aws.String("bar"), aws.String("baz")},
|
||||
},
|
||||
}
|
||||
result := buildEC2CustomFilterList(filters)
|
||||
|
||||
if !reflect.DeepEqual(result, expected) {
|
||||
t.Errorf(
|
||||
"got %#v, but want %#v",
|
||||
result, expected,
|
||||
)
|
||||
}
|
||||
}
|
|
@ -5,10 +5,12 @@ package aws
|
|||
// It currently cannot be generated from the API json.
|
||||
var hostedZoneIDsMap = map[string]string{
|
||||
"us-east-1": "Z3AQBSTGFYJSTF",
|
||||
"us-east-2": "Z2O1EMRO9K5GLX",
|
||||
"us-west-2": "Z3BJ6K6RIION7M",
|
||||
"us-west-1": "Z2F56UZL2M1ACD",
|
||||
"eu-west-1": "Z1BKCTXD74EZPE",
|
||||
"eu-central-1": "Z21DNDUVLTQW6Q",
|
||||
"ap-south-1": "Z11RGJOFQNVJUP",
|
||||
"ap-southeast-1": "Z3O0J2DXBE1FTB",
|
||||
"ap-southeast-2": "Z1WCIGYICN2BYD",
|
||||
"ap-northeast-1": "Z2M4EHUR26P7ZW",
|
||||
|
|
|
@ -2,21 +2,22 @@ package aws
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sort"
|
||||
)
|
||||
|
||||
type IAMPolicyDoc struct {
|
||||
Id string `json:",omitempty"`
|
||||
Version string `json:",omitempty"`
|
||||
Id string `json:",omitempty"`
|
||||
Statements []*IAMPolicyStatement `json:"Statement"`
|
||||
}
|
||||
|
||||
type IAMPolicyStatement struct {
|
||||
Sid string `json:",omitempty"`
|
||||
Sid string
|
||||
Effect string `json:",omitempty"`
|
||||
Actions []string `json:"Action,omitempty"`
|
||||
NotActions []string `json:"NotAction,omitempty"`
|
||||
Resources []string `json:"Resource,omitempty"`
|
||||
NotResources []string `json:"NotResource,omitempty"`
|
||||
Actions interface{} `json:"Action,omitempty"`
|
||||
NotActions interface{} `json:"NotAction,omitempty"`
|
||||
Resources interface{} `json:"Resource,omitempty"`
|
||||
NotResources interface{} `json:"NotResource,omitempty"`
|
||||
Principals IAMPolicyStatementPrincipalSet `json:"Principal,omitempty"`
|
||||
NotPrincipals IAMPolicyStatementPrincipalSet `json:"NotPrincipal,omitempty"`
|
||||
Conditions IAMPolicyStatementConditionSet `json:"Condition,omitempty"`
|
||||
|
@ -24,51 +25,88 @@ type IAMPolicyStatement struct {
|
|||
|
||||
// IAMPolicyStatementPrincipal models one principal entry of a policy
// statement. Identifiers may be either a single string or a []string;
// the diff residue superimposed the old []string and new interface{}
// declarations, so this keeps only the post-merge interface{} form.
type IAMPolicyStatementPrincipal struct {
	Type        string
	Identifiers interface{}
}

// IAMPolicyStatementCondition models one condition entry of a policy
// statement. Values may be either a single string or a []string.
type IAMPolicyStatementCondition struct {
	Test     string
	Variable string
	Values   interface{}
}

type IAMPolicyStatementPrincipalSet []IAMPolicyStatementPrincipal
type IAMPolicyStatementConditionSet []IAMPolicyStatementCondition
|
||||
|
||||
func (ps IAMPolicyStatementPrincipalSet) MarshalJSON() ([]byte, error) {
|
||||
raw := map[string][]string{}
|
||||
raw := map[string]interface{}{}
|
||||
|
||||
// As a special case, IAM considers the string value "*" to be
|
||||
// equivalent to "AWS": "*", and normalizes policies as such.
|
||||
// We'll follow their lead and do the same normalization here.
|
||||
// IAM also considers {"*": "*"} to be equivalent to this.
|
||||
if len(ps) == 1 {
|
||||
p := ps[0]
|
||||
if p.Type == "AWS" || p.Type == "*" {
|
||||
if sv, ok := p.Identifiers.(string); ok && sv == "*" {
|
||||
return []byte(`"*"`), nil
|
||||
}
|
||||
|
||||
if av, ok := p.Identifiers.([]string); ok && len(av) == 1 && av[0] == "*" {
|
||||
return []byte(`"*"`), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, p := range ps {
|
||||
if _, ok := raw[p.Type]; !ok {
|
||||
raw[p.Type] = make([]string, 0, len(p.Identifiers))
|
||||
switch i := p.Identifiers.(type) {
|
||||
case []string:
|
||||
if _, ok := raw[p.Type]; !ok {
|
||||
raw[p.Type] = make([]string, 0, len(i))
|
||||
}
|
||||
sort.Sort(sort.Reverse(sort.StringSlice(i)))
|
||||
raw[p.Type] = append(raw[p.Type].([]string), i...)
|
||||
case string:
|
||||
raw[p.Type] = i
|
||||
default:
|
||||
panic("Unsupported data type for IAMPolicyStatementPrincipalSet")
|
||||
}
|
||||
raw[p.Type] = append(raw[p.Type], p.Identifiers...)
|
||||
}
|
||||
|
||||
return json.Marshal(&raw)
|
||||
}
|
||||
|
||||
func (cs IAMPolicyStatementConditionSet) MarshalJSON() ([]byte, error) {
|
||||
raw := map[string]map[string][]string{}
|
||||
raw := map[string]map[string]interface{}{}
|
||||
|
||||
for _, c := range cs {
|
||||
if _, ok := raw[c.Test]; !ok {
|
||||
raw[c.Test] = map[string][]string{}
|
||||
raw[c.Test] = map[string]interface{}{}
|
||||
}
|
||||
if _, ok := raw[c.Test][c.Variable]; !ok {
|
||||
raw[c.Test][c.Variable] = make([]string, 0, len(c.Values))
|
||||
switch i := c.Values.(type) {
|
||||
case []string:
|
||||
if _, ok := raw[c.Test][c.Variable]; !ok {
|
||||
raw[c.Test][c.Variable] = make([]string, 0, len(i))
|
||||
}
|
||||
sort.Sort(sort.Reverse(sort.StringSlice(i)))
|
||||
raw[c.Test][c.Variable] = append(raw[c.Test][c.Variable].([]string), i...)
|
||||
case string:
|
||||
raw[c.Test][c.Variable] = i
|
||||
default:
|
||||
panic("Unsupported data type for IAMPolicyStatementConditionSet")
|
||||
}
|
||||
raw[c.Test][c.Variable] = append(raw[c.Test][c.Variable], c.Values...)
|
||||
}
|
||||
|
||||
return json.Marshal(&raw)
|
||||
}
|
||||
|
||||
// iamPolicyDecodeConfigStringList converts a raw configuration list
// into the form used by the IAM policy JSON model: a single element
// collapses to its bare string, while multiple elements become a
// reverse-sorted []string. The diff residue superimposed the old
// []string-returning signature on the new interface{} one; this keeps
// only the post-merge version.
func iamPolicyDecodeConfigStringList(lI []interface{}) interface{} {
	// A one-element list is emitted as a plain string, matching the
	// normalization IAM itself applies to policy documents.
	if len(lI) == 1 {
		return lI[0].(string)
	}
	ret := make([]string, len(lI))
	for i, vI := range lI {
		ret[i] = vI.(string)
	}
	sort.Sort(sort.Reverse(sort.StringSlice(ret)))
	return ret
}
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSAPIGatewayAccount_importBasic(t *testing.T) {
|
||||
resourceName := "aws_api_gateway_account.test"
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckAWSAPIGatewayAccountDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSAPIGatewayAccountConfig_empty,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSAPIGatewayApiKey_importBasic(t *testing.T) {
|
||||
resourceName := "aws_api_gateway_api_key.test"
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckAWSAPIGatewayApiKeyDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSAPIGatewayApiKeyConfig,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/cloudfront"
|
||||
"github.com/hashicorp/terraform/helper/schema"
|
||||
)
|
||||
|
||||
func resourceAwsCloudFrontDistributionImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
|
||||
conn := meta.(*AWSClient).cloudfrontconn
|
||||
id := d.Id()
|
||||
resp, err := conn.GetDistributionConfig(&cloudfront.GetDistributionConfigInput{
|
||||
Id: aws.String(id),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
distConfig := resp.DistributionConfig
|
||||
results := make([]*schema.ResourceData, 1)
|
||||
err = flattenDistributionConfig(d, distConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
results[0] = d
|
||||
return results, nil
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSCloudFrontDistribution_importBasic(t *testing.T) {
|
||||
ri := acctest.RandInt()
|
||||
testConfig := fmt.Sprintf(testAccAWSCloudFrontDistributionS3Config, ri, originBucket, logBucket, testAccAWSCloudFrontDistributionRetainConfig())
|
||||
|
||||
resourceName := "aws_cloudfront_distribution.s3_distribution"
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckCloudFrontDistributionDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testConfig,
|
||||
},
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
// Ignore retain_on_delete since it doesn't come from the AWS
|
||||
// API.
|
||||
ImportStateVerifyIgnore: []string{"retain_on_delete"},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -0,0 +1,28 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSCloudFrontOriginAccessIdentity_importBasic(t *testing.T) {
|
||||
resourceName := "aws_cloudfront_origin_access_identity.origin_access_identity"
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckCloudFrontOriginAccessIdentityDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSCloudFrontOriginAccessIdentityConfig,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSCloudTrail_importBasic(t *testing.T) {
|
||||
resourceName := "aws_cloudtrail.foobar"
|
||||
cloudTrailRandInt := acctest.RandInt()
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckAWSCloudTrailDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSCloudTrailConfig(cloudTrailRandInt),
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
ImportStateVerifyIgnore: []string{"enable_log_file_validation", "is_multi_region_trail", "include_global_service_events", "enable_logging"},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -0,0 +1,29 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSCloudWatchEventRule_importBasic(t *testing.T) {
|
||||
resourceName := "aws_cloudwatch_event_rule.foo"
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckAWSCloudWatchEventRuleDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSCloudWatchEventRuleConfig,
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
ImportStateVerifyIgnore: []string{"is_enabled"}, //this has a default value
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/hashicorp/terraform/helper/acctest"
|
||||
"github.com/hashicorp/terraform/helper/resource"
|
||||
)
|
||||
|
||||
func TestAccAWSCloudWatchLogGroup_importBasic(t *testing.T) {
|
||||
resourceName := "aws_cloudwatch_log_group.foobar"
|
||||
rInt := acctest.RandInt()
|
||||
|
||||
resource.Test(t, resource.TestCase{
|
||||
PreCheck: func() { testAccPreCheck(t) },
|
||||
Providers: testAccProviders,
|
||||
CheckDestroy: testAccCheckAWSCloudWatchLogGroupDestroy,
|
||||
Steps: []resource.TestStep{
|
||||
resource.TestStep{
|
||||
Config: testAccAWSCloudWatchLogGroupConfig(rInt),
|
||||
},
|
||||
|
||||
resource.TestStep{
|
||||
ResourceName: resourceName,
|
||||
ImportState: true,
|
||||
ImportStateVerify: true,
|
||||
ImportStateVerifyIgnore: []string{"retention_in_days"}, //this has a default value
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue