provider/aws: Add s3_bucket_object data source
This commit is contained in:
parent 84ab00d92e
commit d4fe1b9145
@@ -0,0 +1,221 @@
package aws

import (
	"bytes"
	"fmt"
	"log"
	"regexp"
	"strings"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/s3"
	"github.com/hashicorp/terraform/helper/schema"
)

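// dataSourceAwsS3BucketObject describes the aws_s3_bucket_object data source:
// bucket and key are required, range and version_id are optional inputs, and
// every other field is computed from the object's metadata.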
func dataSourceAwsS3BucketObject() *schema.Resource {
	return &schema.Resource{
		Read: dataSourceAwsS3BucketObjectRead,

		Schema: map[string]*schema.Schema{
			"body": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"bucket": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
			},
			"cache_control": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"content_disposition": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"content_encoding": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"content_language": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"content_length": &schema.Schema{
				Type:     schema.TypeInt,
				Computed: true,
			},
			"content_type": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"etag": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"expiration": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"expires": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"key": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
			},
			"last_modified": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"metadata": &schema.Schema{
				Type:     schema.TypeMap,
				Computed: true,
			},
			"range": &schema.Schema{
				Type:     schema.TypeString,
				Optional: true,
			},
			"server_side_encryption": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"sse_kms_key_id": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"storage_class": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
			"version_id": &schema.Schema{
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
			},
			"website_redirect_location": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},
		},
	}
}

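// dataSourceAwsS3BucketObjectRead looks up the object with HeadObject and maps
// its metadata into state; the body itself is only fetched (via GetObject) for
// content types that isContentTypeAllowed considers human-readable.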
func dataSourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).s3conn

	bucket := d.Get("bucket").(string)
	key := d.Get("key").(string)

	input := s3.HeadObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	}
	if v, ok := d.GetOk("range"); ok {
		input.Range = aws.String(v.(string))
	}
	if v, ok := d.GetOk("version_id"); ok {
		input.VersionId = aws.String(v.(string))
	}

	versionText := ""
	uniqueId := bucket + "/" + key
	if v, ok := d.GetOk("version_id"); ok {
		versionText = fmt.Sprintf(" of version %q", v.(string))
		uniqueId += "@" + v.(string)
	}

	log.Printf("[DEBUG] Reading S3 object: %s", input)
	out, err := conn.HeadObject(&input)
	if err != nil {
		return fmt.Errorf("Failed getting S3 object: %s", err)
	}
	if out.DeleteMarker != nil && *out.DeleteMarker == true {
		return fmt.Errorf("Requested S3 object %q%s has been deleted",
			bucket+key, versionText)
	}

	log.Printf("[DEBUG] Received S3 object: %s", out)

	d.SetId(uniqueId)

	d.Set("cache_control", out.CacheControl)
	d.Set("content_disposition", out.ContentDisposition)
	d.Set("content_encoding", out.ContentEncoding)
	d.Set("content_language", out.ContentLanguage)
	d.Set("content_length", out.ContentLength)
	d.Set("content_type", out.ContentType)
	// See https://forums.aws.amazon.com/thread.jspa?threadID=44003
	d.Set("etag", strings.Trim(*out.ETag, `"`))
	d.Set("expiration", out.Expiration)
	d.Set("expires", out.Expires)
	d.Set("last_modified", out.LastModified.Format(time.RFC1123))
	d.Set("metadata", pointersMapToStringList(out.Metadata))
	d.Set("server_side_encryption", out.ServerSideEncryption)
	d.Set("sse_kms_key_id", out.SSEKMSKeyId)
	d.Set("storage_class", out.StorageClass)
	d.Set("version_id", out.VersionId)
	d.Set("website_redirect_location", out.WebsiteRedirectLocation)

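	// Fetch the body only for human-readable content types so that large or
	// binary payloads are never pulled into state (see isContentTypeAllowed).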
	if isContentTypeAllowed(out.ContentType) {
		input := s3.GetObjectInput{
			Bucket: aws.String(bucket),
			Key:    aws.String(key),
		}
		if v, ok := d.GetOk("range"); ok {
			input.Range = aws.String(v.(string))
		}
		if out.VersionId != nil {
			input.VersionId = out.VersionId
		}
		out, err := conn.GetObject(&input)
		if err != nil {
			return fmt.Errorf("Failed getting S3 object: %s", err)
		}

		buf := new(bytes.Buffer)
		bytesRead, err := buf.ReadFrom(out.Body)
		if err != nil {
			return fmt.Errorf("Failed reading content of S3 object (%s): %s",
				uniqueId, err)
		}
		log.Printf("[INFO] Saving %d bytes from S3 object %s", bytesRead, uniqueId)
		d.Set("body", buf.String())
	} else {
		contentType := ""
		if out.ContentType == nil {
			contentType = "<EMPTY>"
		} else {
			contentType = *out.ContentType
		}

		log.Printf("[INFO] Ignoring body of S3 object %s with Content-Type %q",
			uniqueId, contentType)
	}

	return nil
}

// This is to prevent potential issues w/ binary files
// and generally unprintable characters
// See https://github.com/hashicorp/terraform/pull/3858#issuecomment-156856738
func isContentTypeAllowed(contentType *string) bool {
	if contentType == nil {
		return false
	}

	allowedContentTypes := []*regexp.Regexp{
		regexp.MustCompile("^text/.+"),
		regexp.MustCompile("^application/json$"),
	}

	for _, r := range allowedContentTypes {
		if r.MatchString(*contentType) {
			return true
		}
	}

	return false
}

@@ -0,0 +1,297 @@
package aws

import (
	"fmt"
	"regexp"
	"testing"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/s3"
	"github.com/hashicorp/terraform/helper/acctest"
	"github.com/hashicorp/terraform/helper/resource"
	"github.com/hashicorp/terraform/terraform"
)

func TestAccDataSourceAWSS3BucketObject_basic(t *testing.T) {
	rInt := acctest.RandInt()
	resourceOnlyConf, conf := testAccAWSDataSourceS3ObjectConfig_basic(rInt)

	var rObj s3.GetObjectOutput
	var dsObj s3.GetObjectOutput

	resource.Test(t, resource.TestCase{
		PreCheck:                  func() { testAccPreCheck(t) },
		Providers:                 testAccProviders,
		PreventPostDestroyRefresh: true,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: resourceOnlyConf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &rObj),
				),
			},
			resource.TestStep{
				Config: conf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAwsS3ObjectDataSourceExists("data.aws_s3_bucket_object.obj", &dsObj),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_length", "11"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_type", "binary/octet-stream"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "etag", "b10a8db164e0754105b7a99be72e3fe5"),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "last_modified",
						regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "body", ""),
				),
			},
		},
	})
}

func TestAccDataSourceAWSS3BucketObject_readableBody(t *testing.T) {
	rInt := acctest.RandInt()
	resourceOnlyConf, conf := testAccAWSDataSourceS3ObjectConfig_readableBody(rInt)

	var rObj s3.GetObjectOutput
	var dsObj s3.GetObjectOutput

	resource.Test(t, resource.TestCase{
		PreCheck:                  func() { testAccPreCheck(t) },
		Providers:                 testAccProviders,
		PreventPostDestroyRefresh: true,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: resourceOnlyConf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &rObj),
				),
			},
			resource.TestStep{
				Config: conf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAwsS3ObjectDataSourceExists("data.aws_s3_bucket_object.obj", &dsObj),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_length", "3"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_type", "text/plain"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "etag", "a6105c0a611b41b08f1209506350279e"),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "last_modified",
						regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "body", "yes"),
				),
			},
		},
	})
}

func TestAccDataSourceAWSS3BucketObject_kmsEncrypted(t *testing.T) {
	rInt := acctest.RandInt()
	resourceOnlyConf, conf := testAccAWSDataSourceS3ObjectConfig_kmsEncrypted(rInt)

	var rObj s3.GetObjectOutput
	var dsObj s3.GetObjectOutput

	resource.Test(t, resource.TestCase{
		PreCheck:                  func() { testAccPreCheck(t) },
		Providers:                 testAccProviders,
		PreventPostDestroyRefresh: true,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: resourceOnlyConf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &rObj),
				),
			},
			resource.TestStep{
				Config: conf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAwsS3ObjectDataSourceExists("data.aws_s3_bucket_object.obj", &dsObj),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_length", "22"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_type", "text/plain"),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "etag", regexp.MustCompile("^[a-f0-9]{32}$")),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "server_side_encryption", "aws:kms"),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "sse_kms_key_id",
						regexp.MustCompile("^arn:aws:kms:us-west-2:[0-9]{12}:key/[a-z0-9-]{36}$")),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "last_modified",
						regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "body", "Keep Calm and Carry On"),
				),
			},
		},
	})
}

func TestAccDataSourceAWSS3BucketObject_allParams(t *testing.T) {
	rInt := acctest.RandInt()
	resourceOnlyConf, conf := testAccAWSDataSourceS3ObjectConfig_allParams(rInt)

	var rObj s3.GetObjectOutput
	var dsObj s3.GetObjectOutput

	resource.Test(t, resource.TestCase{
		PreCheck:                  func() { testAccPreCheck(t) },
		Providers:                 testAccProviders,
		PreventPostDestroyRefresh: true,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: resourceOnlyConf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &rObj),
				),
			},
			resource.TestStep{
				Config: conf,
				Check: resource.ComposeTestCheckFunc(
					testAccCheckAwsS3ObjectDataSourceExists("data.aws_s3_bucket_object.obj", &dsObj),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_length", "21"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_type", "application/unknown"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "etag", "723f7a6ac0c57b445790914668f98640"),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "last_modified",
						regexp.MustCompile("^[a-zA-Z]{3}, [0-9]+ [a-zA-Z]+ [0-9]{4} [0-9:]+ [A-Z]+$")),
					resource.TestMatchResourceAttr("data.aws_s3_bucket_object.obj", "version_id", regexp.MustCompile("^.{32}$")),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "body", ""),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "cache_control", "no-cache"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_disposition", "attachment"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_encoding", "gzip"),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "content_language", "en-GB"),
					// Encryption is off
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "server_side_encryption", ""),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "sse_kms_key_id", ""),
					// Supported, but difficult to reproduce in short testing time
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "storage_class", ""),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "expiration", ""),
					// Currently unsupported in aws_s3_bucket_object resource
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "expires", ""),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "website_redirect_location", ""),
					resource.TestCheckResourceAttr("data.aws_s3_bucket_object.obj", "metadata.#", "0"),
				),
			},
		},
	})
}

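// testAccCheckAwsS3ObjectDataSourceExists verifies that the data source recorded
// in state points at a real object by fetching it directly from S3.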
func testAccCheckAwsS3ObjectDataSourceExists(n string, obj *s3.GetObjectOutput) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, ok := s.RootModule().Resources[n]
		if !ok {
			return fmt.Errorf("Can't find S3 object data source: %s", n)
		}

		if rs.Primary.ID == "" {
			return fmt.Errorf("S3 object data source ID not set")
		}

		s3conn := testAccProvider.Meta().(*AWSClient).s3conn
		out, err := s3conn.GetObject(
			&s3.GetObjectInput{
				Bucket: aws.String(rs.Primary.Attributes["bucket"]),
				Key:    aws.String(rs.Primary.Attributes["key"]),
			})
		if err != nil {
			return fmt.Errorf("Failed getting S3 Object from %s: %s",
				rs.Primary.Attributes["bucket"]+"/"+rs.Primary.Attributes["key"], err)
		}

		*obj = *out

		return nil
	}
}

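// Each config helper below returns two configurations: one containing only the
// bucket and object resources, and one that also declares the data source. The
// tests apply them in sequence so the object exists before the data source
// tries to read it.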
func testAccAWSDataSourceS3ObjectConfig_basic(randInt int) (string, string) {
	resources := fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
	bucket = "tf-object-test-bucket-%d"
}
resource "aws_s3_bucket_object" "object" {
	bucket = "${aws_s3_bucket.object_bucket.bucket}"
	key = "tf-testing-obj-%d"
	content = "Hello World"
}
`, randInt, randInt)

	both := fmt.Sprintf(`%s
data "aws_s3_bucket_object" "obj" {
	bucket = "tf-object-test-bucket-%d"
	key = "tf-testing-obj-%d"
}`, resources, randInt, randInt)

	return resources, both
}

func testAccAWSDataSourceS3ObjectConfig_readableBody(randInt int) (string, string) {
	resources := fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
	bucket = "tf-object-test-bucket-%d"
}
resource "aws_s3_bucket_object" "object" {
	bucket = "${aws_s3_bucket.object_bucket.bucket}"
	key = "tf-testing-obj-%d-readable"
	content = "yes"
	content_type = "text/plain"
}
`, randInt, randInt)

	both := fmt.Sprintf(`%s
data "aws_s3_bucket_object" "obj" {
	bucket = "tf-object-test-bucket-%d"
	key = "tf-testing-obj-%d-readable"
}`, resources, randInt, randInt)

	return resources, both
}

func testAccAWSDataSourceS3ObjectConfig_kmsEncrypted(randInt int) (string, string) {
	resources := fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
	bucket = "tf-object-test-bucket-%d"
}
resource "aws_kms_key" "example" {
	description = "TF Acceptance Test KMS key"
	deletion_window_in_days = 7
}
resource "aws_s3_bucket_object" "object" {
	bucket = "${aws_s3_bucket.object_bucket.bucket}"
	key = "tf-testing-obj-%d-encrypted"
	content = "Keep Calm and Carry On"
	content_type = "text/plain"
	kms_key_id = "${aws_kms_key.example.arn}"
}
`, randInt, randInt)

	both := fmt.Sprintf(`%s
data "aws_s3_bucket_object" "obj" {
	bucket = "tf-object-test-bucket-%d"
	key = "tf-testing-obj-%d-encrypted"
}`, resources, randInt, randInt)

	return resources, both
}

func testAccAWSDataSourceS3ObjectConfig_allParams(randInt int) (string, string) {
	resources := fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
	bucket = "tf-object-test-bucket-%d"
	versioning {
		enabled = true
	}
}

resource "aws_s3_bucket_object" "object" {
	bucket = "${aws_s3_bucket.object_bucket.bucket}"
	key = "tf-testing-obj-%d-all-params"
	content = <<CONTENT
{"msg": "Hi there!"}
CONTENT
	content_type = "application/unknown"
	cache_control = "no-cache"
	content_disposition = "attachment"
	content_encoding = "gzip"
	content_language = "en-GB"
}
`, randInt, randInt)

	both := fmt.Sprintf(`%s
data "aws_s3_bucket_object" "obj" {
	bucket = "tf-object-test-bucket-%d"
	key = "tf-testing-obj-%d-all-params"
}`, resources, randInt, randInt)

	return resources, both
}

@@ -114,6 +114,7 @@ func Provider() terraform.ResourceProvider {
			"aws_ami":                 dataSourceAwsAmi(),
			"aws_availability_zones":  dataSourceAwsAvailabilityZones(),
			"aws_iam_policy_document": dataSourceAwsIamPolicyDocument(),
			"aws_s3_bucket_object":    dataSourceAwsS3BucketObject(),
		},

		ResourcesMap: map[string]*schema.Resource{

@@ -0,0 +1,81 @@
---
layout: "aws"
page_title: "AWS: aws_s3_bucket_object"
sidebar_current: "docs-aws-datasource-s3-bucket-object"
description: |-
  Provides metadata and optionally content of an S3 object
---

# aws\_s3\_bucket\_object

The S3 object data source allows access to the metadata and,
_optionally_ (see below), the content of an object stored inside an S3 bucket.

~> **Note:** The content of an object (`body` field) is available only for objects which have a human-readable `Content-Type` (`text/*` and `application/json`). This is to prevent printing unsafe characters and potentially downloading large amounts of data which would be thrown away in favour of metadata.

## Example Usage

```
data "aws_s3_bucket_object" "lambda" {
  bucket = "my-lambda-functions"
  key    = "hello-world.zip"
}

resource "aws_iam_role" "iam_for_lambda" {
  name = "iam_for_lambda"
  assume_role_policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": "sts:AssumeRole",
      "Principal": {
        "Service": "lambda.amazonaws.com"
      },
      "Effect": "Allow",
      "Sid": ""
    }
  ]
}
EOF
}

resource "aws_lambda_function" "test_lambda" {
  s3_bucket         = "${data.aws_s3_bucket_object.lambda.bucket}"
  s3_key            = "${data.aws_s3_bucket_object.lambda.key}"
  s3_object_version = "${data.aws_s3_bucket_object.lambda.version_id}"
  function_name     = "lambda_function_name"
  role              = "${aws_iam_role.iam_for_lambda.arn}"
  handler           = "exports.test"
}
```
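
A zip archive like the one above typically does not have a human-readable content type, so its `body` is left empty. As a minimal additional sketch (the bucket and key names here are hypothetical), an object stored with a `text/*` content type also exposes its content through `body`:

```
data "aws_s3_bucket_object" "motd" {
  bucket = "my-config-bucket"
  key    = "motd.txt"
}

output "motd" {
  value = "${data.aws_s3_bucket_object.motd.body}"
}
```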

## Argument Reference

The following arguments are supported:

* `bucket` - (Required) The name of the bucket to read the object from
* `key` - (Required) The full path to the object inside the bucket
* `version_id` - (Optional) Specific version ID of the object returned (defaults to latest version)

## Attributes Reference

The following attributes are exported:

* `body` - Object data (see **limitations above** to understand cases in which this field is actually available)
* `cache_control` - Specifies caching behavior along the request/reply chain.
* `content_disposition` - Specifies presentational information for the object.
* `content_encoding` - Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.
* `content_language` - The language the content is in.
* `content_length` - Size of the body in bytes.
* `content_type` - A standard MIME type describing the format of the object data.
* `etag` - [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) generated for the object (an MD5 sum of the object content in case it's not encrypted)
* `expiration` - If the object expiration is configured (see [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html)), the field includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded.
* `expires` - The date and time at which the object is no longer cacheable.
* `last_modified` - Last modified date of the object in RFC1123 format (e.g. `Mon, 02 Jan 2006 15:04:05 MST`)
* `metadata` - A map of metadata stored with the object in S3
* `server_side_encryption` - If the object is stored using server-side encryption (KMS or Amazon S3-managed encryption key), this field includes the chosen encryption and algorithm used.
* `sse_kms_key_id` - If present, specifies the ID of the Key Management Service (KMS) master encryption key that was used for the object.
* `storage_class` - [Storage class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) information of the object. Available for all objects except for `Standard` storage class objects.
* `version_id` - The latest version ID of the object returned.
* `website_redirect_location` - If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

@@ -19,10 +19,12 @@
    <li<%= sidebar_current("docs-aws-datasource-availability-zones") %>>
      <a href="/docs/providers/aws/d/availability_zones.html">aws_availability_zones</a>
    </li>

    <li<%= sidebar_current("docs-aws-datasource-iam-policy-document") %>>
      <a href="/docs/providers/aws/d/iam_policy_document.html">aws_iam_policy_document</a>
    </li>

    <li<%= sidebar_current("docs-aws-datasource-s3-bucket-object") %>>
      <a href="/docs/providers/aws/d/s3_bucket_object.html">aws_s3_bucket_object</a>
    </li>
  </ul>
</li>