diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go
index 9d46952d0..8a2e8370b 100644
--- a/builtin/providers/aws/resource_aws_s3_bucket_object.go
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go
@@ -4,6 +4,8 @@ import (
 	"fmt"
 	"log"
 	"os"
+	"io"
+	"bytes"
 
 	"github.com/hashicorp/terraform/helper/schema"
 
@@ -34,10 +36,18 @@ func resourceAwsS3BucketObject() *schema.Resource {
 
 			"source": &schema.Schema{
 				Type:     schema.TypeString,
-				Required: true,
+				Optional: true,
 				ForceNew: true,
+				ConflictsWith: []string{"content"},
 			},
 
+			"content": &schema.Schema{
+				Type:     schema.TypeString,
+				Optional: true,
+				ForceNew: true,
+				ConflictsWith: []string{"source"},
+			},
+
 			"etag": &schema.Schema{
 				Type:     schema.TypeString,
 				Computed: true,
@@ -51,19 +61,28 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) erro
 
 	bucket := d.Get("bucket").(string)
 	key := d.Get("key").(string)
-	source := d.Get("source").(string)
+	var body io.ReadSeeker
 
-	file, err := os.Open(source)
+	if v, ok := d.GetOk("source"); ok {
+		source := v.(string)
+		file, err := os.Open(source)
+		if err != nil {
+			return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
+		}
 
-	if err != nil {
-		return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
+		body = file
+	} else if v, ok := d.GetOk("content"); ok {
+		content := v.(string)
+		body = bytes.NewReader([]byte(content))
+	} else {
+		return fmt.Errorf("Must specify \"source\" or \"content\" field")
 	}
 
 	resp, err := s3conn.PutObject(
 		&s3.PutObjectInput{
 			Bucket: aws.String(bucket),
 			Key:    aws.String(key),
-			Body:   file,
+			Body:   body,
 		})
 
 	if err != nil {
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
index 4f947736a..6311dd7c3 100644
--- a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
@@ -15,7 +15,7 @@ import (
 var tf, err = ioutil.TempFile("", "tf")
 
-func TestAccAWSS3BucketObject_basic(t *testing.T) {
+func TestAccAWSS3BucketObject_source(t *testing.T) {
 	// first write some data to the tempfile just so it's not 0 bytes.
 	ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644)
 
 	resource.Test(t, resource.TestCase{
@@ -29,7 +29,26 @@ func TestAccAWSS3BucketObject_basic(t *testing.T) {
 		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
 		Steps: []resource.TestStep{
 			resource.TestStep{
-				Config: testAccAWSS3BucketObjectConfig,
+				Config: testAccAWSS3BucketObjectConfigSource,
+				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
+			},
+		},
+	})
+}
+
+func TestAccAWSS3BucketObject_content(t *testing.T) {
+	resource.Test(t, resource.TestCase{
+		PreCheck: func() {
+			if err != nil {
+				panic(err)
+			}
+			testAccPreCheck(t)
+		},
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+		Steps: []resource.TestStep{
+			resource.TestStep{
+				Config: testAccAWSS3BucketObjectConfigContent,
 				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
 			},
 		},
@@ -86,7 +105,7 @@ func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc {
 }
 
 var randomBucket = randInt
-var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
+var testAccAWSS3BucketObjectConfigSource = fmt.Sprintf(`
 resource "aws_s3_bucket" "object_bucket" {
 	bucket = "tf-object-test-bucket-%d"
 }
@@ -97,3 +116,17 @@ resource "aws_s3_bucket_object" "object" {
 	source = "%s"
 }
 `, randomBucket, tf.Name())
+
+
+var testAccAWSS3BucketObjectConfigContent = fmt.Sprintf(`
+resource "aws_s3_bucket" "object_bucket" {
+	bucket = "tf-object-test-bucket-%d"
+}
+
+resource "aws_s3_bucket_object" "object" {
+	bucket = "${aws_s3_bucket.object_bucket.bucket}"
+	key = "test-key"
+	content = "some_bucket_content"
+}
+`, randomBucket)
+
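For reference, a minimal user-facing configuration exercising the new content argument might look like the sketch below (the resource label, bucket name, key, and payload are all hypothetical and only for illustration). Because both attributes declare ConflictsWith, setting source and content together is rejected during validation, and omitting both now fails at apply time with the explicit error added in resourceAwsS3BucketObjectPut.

resource "aws_s3_bucket_object" "inline_example" {
	bucket  = "an-existing-bucket-name"   # hypothetical bucket
	key     = "generated/config.json"     # hypothetical key
	# Inline payload written straight to S3; no local source file needed.
	content = "{\"generated\": \"inline\"}"
}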