diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go
index acd4296f7..b1c399dd1 100644
--- a/builtin/providers/aws/resource_aws_s3_bucket_object.go
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go
@@ -27,6 +27,37 @@ func resourceAwsS3BucketObject() *schema.Resource {
                 ForceNew: true,
             },
 
+            "cache_control": &schema.Schema{
+                Type:     schema.TypeString,
+                Optional: true,
+                ForceNew: true,
+            },
+
+            "content_disposition": &schema.Schema{
+                Type:     schema.TypeString,
+                Optional: true,
+                ForceNew: true,
+            },
+
+            "content_encoding": &schema.Schema{
+                Type:     schema.TypeString,
+                Optional: true,
+                ForceNew: true,
+            },
+
+            "content_language": &schema.Schema{
+                Type:     schema.TypeString,
+                Optional: true,
+                ForceNew: true,
+            },
+
+            "content_type": &schema.Schema{
+                Type:     schema.TypeString,
+                Optional: true,
+                ForceNew: true,
+                Computed: true,
+            },
+
             "key": &schema.Schema{
                 Type:     schema.TypeString,
                 Required: true,
@@ -74,16 +105,36 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) erro
         content := v.(string)
         body = bytes.NewReader([]byte(content))
     } else {
+        return fmt.Errorf("Must specify \"source\" or \"content\" field")
     }
 
+    putInput := &s3.PutObjectInput{
+        Bucket: aws.String(bucket),
+        Key:    aws.String(key),
+        Body:   body,
+    }
-    resp, err := s3conn.PutObject(
-        &s3.PutObjectInput{
-            Bucket: aws.String(bucket),
-            Key:    aws.String(key),
-            Body:   body,
-        })
+    if v, ok := d.GetOk("cache_control"); ok {
+        putInput.CacheControl = aws.String(v.(string))
+    }
+
+    if v, ok := d.GetOk("content_type"); ok {
+        putInput.ContentType = aws.String(v.(string))
+    }
+
+    if v, ok := d.GetOk("content_encoding"); ok {
+        putInput.ContentEncoding = aws.String(v.(string))
+    }
+
+    if v, ok := d.GetOk("content_language"); ok {
+        putInput.ContentLanguage = aws.String(v.(string))
+    }
+
+    if v, ok := d.GetOk("content_disposition"); ok {
+        putInput.ContentDisposition = aws.String(v.(string))
+    }
+
+    resp, err := s3conn.PutObject(putInput)
 
     if err != nil {
         return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
     }
@@ -117,6 +168,12 @@ func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) err
         return err
     }
 
+    d.Set("cache_control", resp.CacheControl)
+    d.Set("content_disposition", resp.ContentDisposition)
+    d.Set("content_encoding", resp.ContentEncoding)
+    d.Set("content_language", resp.ContentLanguage)
+    d.Set("content_type", resp.ContentType)
+
     log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp)
     return nil
 }
@@ -137,4 +194,3 @@ func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) e
     }
     return nil
 }
-
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
index 0e0651ad0..ea28f9d37 100644
--- a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
@@ -55,6 +55,31 @@ func TestAccAWSS3BucketObject_content(t *testing.T) {
     })
 }
 
+func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) {
+    // first write some data to the tempfile just so it's not 0 bytes.
+    ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644)
+    resource.Test(t, resource.TestCase{
+        PreCheck: func() {
+            if err != nil {
+                panic(err)
+            }
+            testAccPreCheck(t)
+        },
+        Providers:    testAccProviders,
+        CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+        Steps: []resource.TestStep{
+            resource.TestStep{
+                Config: testAccAWSS3BucketObjectConfig_withContentCharacteristics,
+                Check: resource.ComposeTestCheckFunc(
+                    testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
+                    resource.TestCheckResourceAttr(
+                        "aws_s3_bucket_object.object", "content_type", "binary/octet-stream"),
+                ),
+            },
+        },
+    })
+}
+
 func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error {
     s3conn := testAccProvider.Meta().(*AWSClient).s3conn
 
@@ -110,9 +135,24 @@ resource "aws_s3_bucket" "object_bucket" {
     bucket = "tf-object-test-bucket-%d"
 }
 
 resource "aws_s3_bucket_object" "object" {
-    bucket = "${aws_s3_bucket.object_bucket.bucket}"
-    key = "test-key"
-    source = "%s"
+    bucket       = "${aws_s3_bucket.object_bucket.bucket}"
+    key          = "test-key"
+    source       = "%s"
+    content_type = "binary/octet-stream"
 }
 `, randomBucket, tf.Name())
+
+var testAccAWSS3BucketObjectConfig_withContentCharacteristics = fmt.Sprintf(`
+resource "aws_s3_bucket" "object_bucket_2" {
+    bucket = "tf-object-test-bucket-%d"
+}
+
+resource "aws_s3_bucket_object" "object" {
+    bucket           = "${aws_s3_bucket.object_bucket_2.bucket}"
+    key              = "test-key"
+    source           = "%s"
+    content_language = "en"
+    content_type     = "binary/octet-stream"
+}
+`, randomBucket, tf.Name())
@@ -126,4 +166,3 @@ resource "aws_s3_bucket_object" "object" {
     content = "some_bucket_content"
 }
 `, randomBucket)
-
diff --git a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
index 14286a603..bd54047d6 100644
--- a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
+++ b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
@@ -28,8 +28,13 @@ The following arguments are supported:
 
 * `bucket` - (Required) The name of the bucket to put the file in.
 * `key` - (Required) The name of the object once it is in the bucket.
 * `source` - (Required unless `content` given) The path to the source file being uploaded to the bucket.
 * `content` - (Required unless `source` given) The literal content being uploaded to the bucket.
+* `cache_control` - (Optional) Specifies caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
+* `content_disposition` - (Optional) Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
+* `content_encoding` - (Optional) Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `content_language` - (Optional) The language the content is in, e.g. en-US or en-GB.
+* `content_type` - (Optional) A standard MIME type describing the format of the object data, e.g. application/octet-stream. All valid MIME types are accepted for this input.
 
 Either `source` or `content` must be provided to specify the bucket content.
 These two arguments are mutually-exclusive.
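
For reference, a minimal Terraform configuration exercising the new arguments might look like the sketch below. The resource names, bucket name, key, and file path are illustrative placeholders, not values taken from this patch.

```hcl
# Illustrative sketch only: resource names, bucket name, and file path are hypothetical.
resource "aws_s3_bucket" "example" {
  bucket = "my-example-bucket"
}

resource "aws_s3_bucket_object" "example" {
  bucket              = "${aws_s3_bucket.example.bucket}"
  key                 = "index.html"
  source              = "files/index.html"
  cache_control       = "max-age=3600"
  content_disposition = "inline"
  content_language    = "en-US"
  content_type        = "text/html"
}
```

`content_encoding` would be set the same way if the source file were pre-compressed (for example with gzip). Since all of these attributes are `ForceNew`, changing any of them replaces the object rather than updating it in place.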